/*
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 */

#pragma once

#include <array>
#include <limits>
#include <map>
#include <memory>
#include <unordered_map>
#include <vector>
#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"

class SyncValidator;

enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};
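// Note: READ_AFTER_WRITE, WRITE_AFTER_READ, and WRITE_AFTER_WRITE are the classic same-queue hazards detected
// against tracked prior accesses; the *_RACING_* values are reported for accesses from unsynchronized ("async")
// contexts (for example, subpasses with no dependency between them), where no ordering exists at all.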

// Useful utilities for manipulating StageAccess parameters, suitable as a base class to save typing
struct SyncStageAccess {
    static inline SyncStageAccessFlagBits FlagBit(SyncStageAccessIndex stage_access) {
        return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
    }
    static inline SyncStageAccessFlags Flags(SyncStageAccessIndex stage_access) {
        return static_cast<SyncStageAccessFlags>(FlagBit(stage_access));
    }

    static bool IsRead(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessReadMask); }
    static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }

    static bool IsWrite(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessWriteMask); }
    static bool HasWrite(SyncStageAccessFlags stage_access_mask) { return 0 != (stage_access_mask & syncStageAccessWriteMask); }
    static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
    static VkPipelineStageFlagBits PipelineStageBit(SyncStageAccessIndex stage_access_index) {
        return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
    }
    static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags stages);
    static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(VkPipelineStageFlags stages, VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(SyncStageAccessFlags stage_scope, VkAccessFlags accesses) {
        return stage_scope & AccessScopeByAccess(accesses);
    }
};
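// Illustrative sketch (not a prescribed call site): the access scope of one half of a barrier is the
// intersection of a stage scope and the declared accesses, as the inline overload above shows, e.g.
//   SyncStageAccessFlags src_scope = SyncStageAccess::AccessScope(AccessScopeByStage(srcStageMask), srcAccessMask);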

struct ResourceUsageTag {
    uint64_t index;
    CMD_TYPE command;
    const static uint64_t kMaxIndex = std::numeric_limits<uint64_t>::max();
    ResourceUsageTag &operator++() {
        index++;
        return *this;
    }
    bool IsBefore(const ResourceUsageTag &rhs) const { return index < rhs.index; }
    bool operator==(const ResourceUsageTag &rhs) const { return (index == rhs.index); }
    bool operator!=(const ResourceUsageTag &rhs) const { return !(*this == rhs); }
    ResourceUsageTag() : index(0), command(CMD_NONE) {}
    ResourceUsageTag(uint64_t index_, CMD_TYPE command_) : index(index_), command(command_) {}
};
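// Note: tags are issued monotonically per command buffer; CommandBufferAccessContext::NextCommandTag (below)
// packs (reset_count_ << 32) | command_number_ into index, so IsBefore() also orders usages across resets.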

struct HazardResult {
    SyncHazard hazard = NONE;
    SyncStageAccessFlags prior_access = 0U;  // TODO -- change to a NONE enum in ...Bits
    ResourceUsageTag tag = ResourceUsageTag();
    void Set(SyncHazard hazard_, SyncStageAccessFlags prior_, const ResourceUsageTag &tag_) {
        hazard = hazard_;
        prior_access = prior_;
        tag = tag_;
    }
};

struct SyncBarrier {
    VkPipelineStageFlags src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    VkPipelineStageFlags dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
    SyncBarrier() = default;
    SyncBarrier &operator=(const SyncBarrier &) = default;
    SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &sub_pass_barrier);
};

// To represent ordering guarantees such as rasterization and store
struct SyncOrderingBarrier {
    VkPipelineStageFlags exec_scope;
    SyncStageAccessFlags access_scope;
    SyncOrderingBarrier() = default;
    SyncOrderingBarrier &operator=(const SyncOrderingBarrier &) = default;
};
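// Illustrative (an assumption about intended use, not a fixed contract): an ordering barrier for color
// attachment raster order might carry exec_scope = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT plus the
// matching attachment access scope, letting same-subpass attachment accesses that the ordering guarantee
// already covers pass the DetectHazard overloads below that take a SyncOrderingBarrier.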

class ResourceAccessState : public SyncStageAccess {
  protected:
    // Multiple read operations can be simultaneously (and independently) synchronized. Given that only the
    // second execution scope creates a dependency chain, we have to track each, but only up to one per pipeline
    // stage (as another read from the *same* stage becomes the more recent, and applicable, one for hazard detection).
    struct ReadState {
        VkPipelineStageFlagBits stage;  // The stage of this read
        SyncStageAccessFlags access;    // TODO: Change to FlagBits when we have a None bit enum
        VkPipelineStageFlags barriers;  // all applicable barriered stages
        ResourceUsageTag tag;
        bool operator==(const ReadState &rhs) const {
            bool same = (stage == rhs.stage) && (access == rhs.access) && (barriers == rhs.barriers) && (tag == rhs.tag);
            return same;
        }
        bool operator!=(const ReadState &rhs) const { return !(*this == rhs); }
    };

  public:
    HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, const SyncOrderingBarrier &ordering) const;

    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags source_exec_scope,
                                     SyncStageAccessFlags source_access_scope) const;
    HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index) const;

    void Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag);
    void Resolve(const ResourceAccessState &other);
    void ApplyBarrier(const SyncBarrier &barrier);
    void ApplyExecutionBarrier(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask);
    void ApplyMemoryAccessBarrier(VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_scope,
                                  VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_scope);

    ResourceAccessState()
        : write_barriers(~SyncStageAccessFlags(0)),
          write_dependency_chain(0),
          write_tag(),
          last_write(0),
          input_attachment_barriers(kNoAttachmentRead),
          input_attachment_tag(),
          last_read_count(0),
          last_read_stages(0) {}

    bool HasWriteOp() const { return last_write != 0; }
    bool operator==(const ResourceAccessState &rhs) const {
        bool same = (write_barriers == rhs.write_barriers) && (write_dependency_chain == rhs.write_dependency_chain) &&
                    (last_read_count == rhs.last_read_count) && (last_read_stages == rhs.last_read_stages) &&
                    (write_tag == rhs.write_tag) && (input_attachment_barriers == rhs.input_attachment_barriers) &&
                    ((input_attachment_barriers == kNoAttachmentRead) || input_attachment_tag == rhs.input_attachment_tag);
        for (uint32_t i = 0; same && i < last_read_count; i++) {
            same &= last_reads[i] == rhs.last_reads[i];
        }
        return same;
    }
    bool operator!=(const ResourceAccessState &rhs) const { return !(*this == rhs); }

  private:
    static constexpr VkPipelineStageFlags kNoAttachmentRead = ~VkPipelineStageFlags(0);
    bool IsWriteHazard(SyncStageAccessFlagBits usage) const { return 0 != (usage & ~write_barriers); }

    static bool IsReadHazard(VkPipelineStageFlagBits stage, const VkPipelineStageFlags barriers) {
        return 0 != (stage & ~barriers);
    }
    static bool IsReadHazard(VkPipelineStageFlags stage_mask, const VkPipelineStageFlags barriers) {
        return stage_mask != (stage_mask & barriers);
    }

    bool IsReadHazard(VkPipelineStageFlagBits stage, const ReadState &read_access) const {
        return IsReadHazard(stage, read_access.barriers);
    }
    bool IsReadHazard(VkPipelineStageFlags stage_mask, const ReadState &read_access) const {
        return IsReadHazard(stage_mask, read_access.barriers);
    }

    // TODO: Add a NONE (zero) enum to SyncStageAccessFlagBits for input_attachment_read and last_write

    // With reads, each must be "safe" relative to its prior write, so we need only save the most recent
    // write operation (as anything *transitively* unsafe would already be included).
    SyncStageAccessFlags write_barriers;          // union of applicable barrier masks since last write
    VkPipelineStageFlags write_dependency_chain;  // initially zero, but accumulating the dstStages of barriers if they chain.
    ResourceUsageTag write_tag;
    SyncStageAccessFlags last_write;  // only the most recent write

    // This is special as it's a framebuffer-local read from a framebuffer-global pipeline stage.
    // As the only possible state for the input attachment stage/access is SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ_BIT,
    // encode the presence with the barriers mask: ~0 denotes no pending input attachment, zero is the no-barrier state,
    // and any other value reflects the barrier/dependency chain information.
    VkPipelineStageFlags input_attachment_barriers;
    ResourceUsageTag input_attachment_tag;

    uint32_t last_read_count;
    VkPipelineStageFlags last_read_stages;
    static constexpr size_t kStageCount = 32;  // TODO: The manual count was 28 real stages. Add stage count to codegen
    std::array<ReadState, kStageCount> last_reads;
};
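// Illustrative sketch of how a single ResourceAccessState is consulted and then updated for one usage
// (the SYNC_* enumerant name is assumed to come from the generated synchronization_validation_types.h):
//   HazardResult hazard = access_state.DetectHazard(SYNC_TRANSFER_TRANSFER_WRITE);
//   if (hazard.hazard == NONE) access_state.Update(SYNC_TRANSFER_TRANSFER_WRITE, tag);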

using ResourceAccessRangeMap = sparse_container::range_map<VkDeviceSize, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;
using ResourceRangeMergeIterator = sparse_container::parallel_iterator<ResourceAccessRangeMap, const ResourceAccessRangeMap>;

class AccessContext {
  public:
    enum AddressType : int { kLinearAddress = 0, kIdealizedAddress = 1, kMaxAddressType = 1 };
    enum DetectOptions : uint32_t {
        kDetectPrevious = 1U << 0,
        kDetectAsync = 1U << 1,
        kDetectAll = (kDetectPrevious | kDetectAsync)
    };

    struct TrackBack {
        SyncBarrier barrier;
        const AccessContext *context;
        TrackBack(const AccessContext *context_, VkQueueFlags queue_flags_, const VkSubpassDependency2 &subpass_barrier_)
            : barrier(queue_flags_, subpass_barrier_), context(context_) {}
        TrackBack &operator=(const TrackBack &) = default;
        TrackBack() = default;
    };

    HazardResult DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;
    template <typename Detector>
    HazardResult DetectHazard(Detector &detector, const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
                              const VkOffset3D &offset, const VkExtent3D &extent, DetectOptions options) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, const SyncOrderingBarrier &ordering,
                              const VkOffset3D &offset, const VkExtent3D &extent) const;
    HazardResult DetectHazard(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
                              const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask = 0U) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          SyncStageAccessFlags src_access_scope, const VkImageSubresourceRange &subresource_range,
                                          DetectOptions options) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          SyncStageAccessFlags src_stage_accesses, const VkImageMemoryBarrier &barrier) const;
    HazardResult DetectSubpassTransitionHazard(const TrackBack &track_back, const IMAGE_VIEW_STATE *attach_view) const;

    const TrackBack &GetDstExternalTrackBack() const { return dst_external_; }
    void Reset() {
        prev_.clear();
        prev_by_subpass_.clear();
        async_.clear();
        src_external_ = TrackBack();
        for (auto &map : access_state_maps_) {
            map.clear();
        }
    }
    // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead
    // Would need to add a "hint" overload to parallel_iterator::invalidate_[AB] call, if so.
    void ResolvePreviousAccess(AddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state) const;
    void ResolvePreviousAccess(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range,
                               AddressType address_type, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state) const;
    void ResolveAccessRange(AddressType type, const ResourceAccessRange &range, const SyncBarrier *barrier,
                            ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
                            bool recur_to_infill = true) const;
    void UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const VkOffset3D &offset,
                           const VkExtent3D &extent, VkImageAspectFlags aspect_mask, const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    void UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                       const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                       const ResourceUsageTag &tag);
    void UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                     const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                     const ResourceUsageTag &tag);

    void ResolveChildContexts(const std::vector<AccessContext> &contexts);

    void ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                           VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags dst_access_scope,
                           const VkImageSubresourceRange &subresource_range);

    void ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                           VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags dst_access_scope,
                           const VkImageSubresourceRange &subresource_range, bool layout_transition, const ResourceUsageTag &tag);
    void ApplyImageBarrier(const IMAGE_STATE &image, const SyncBarrier &barrier, const VkImageSubresourceRange &subresource_range,
                           bool layout_transition, const ResourceUsageTag &tag);

    template <typename Action>
    void UpdateMemoryAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action);
    template <typename Action>
    void UpdateMemoryAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range, const Action action);

    template <typename Action>
    void ApplyGlobalBarriers(const Action &barrier_action);

    static AddressType ImageAddressType(const IMAGE_STATE &image);
    static VkDeviceSize ResourceBaseAddress(const BINDABLE &bindable);

    AccessContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
                  const std::vector<AccessContext> &contexts, const AccessContext *external_context);

    AccessContext() { Reset(); }
    AccessContext(const AccessContext &copy_from) = default;

    ResourceAccessRangeMap &GetAccessStateMap(AddressType type) { return access_state_maps_[type]; }
    const ResourceAccessRangeMap &GetAccessStateMap(AddressType type) const { return access_state_maps_[type]; }
    ResourceAccessRangeMap &GetLinearMap() { return GetAccessStateMap(AddressType::kLinearAddress); }
    const ResourceAccessRangeMap &GetLinearMap() const { return GetAccessStateMap(AddressType::kLinearAddress); }
    ResourceAccessRangeMap &GetIdealizedMap() { return GetAccessStateMap(AddressType::kIdealizedAddress); }
    const ResourceAccessRangeMap &GetIdealizedMap() const { return GetAccessStateMap(AddressType::kIdealizedAddress); }
    const TrackBack *GetTrackBackFromSubpass(uint32_t subpass) const {
        if (subpass == VK_SUBPASS_EXTERNAL) {
            return &src_external_;
        } else {
            assert(subpass < prev_by_subpass_.size());
            return prev_by_subpass_[subpass];
        }
    }

    bool ValidateLayoutTransitions(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                   const VkRect2D &render_area, uint32_t subpass,
                                   const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name) const;
    bool ValidateLoadOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                               uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                               const char *func_name) const;
    bool ValidateStoreOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                const char *func_name) const;
    bool ValidateResolveOperations(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                   const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
                                   uint32_t subpass) const;

  private:
    HazardResult DetectHazard(AddressType type, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
    HazardResult DetectBarrierHazard(AddressType type, SyncStageAccessIndex current_usage, VkPipelineStageFlags src_exec_scope,
                                     SyncStageAccessFlags src_access_scope, const ResourceAccessRange &range,
                                     DetectOptions options) const;

    template <typename Detector>
    HazardResult DetectHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range,
                              DetectOptions options) const;
    template <typename Detector>
    HazardResult DetectAsyncHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectPreviousHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    void UpdateAccessState(AddressType type, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    constexpr static int kAddressTypeCount = AddressType::kMaxAddressType + 1;
    static const std::array<AddressType, kAddressTypeCount> kAddressTypes;
    std::array<ResourceAccessRangeMap, kAddressTypeCount> access_state_maps_;
    std::vector<TrackBack> prev_;
    std::vector<TrackBack *> prev_by_subpass_;
    std::vector<AccessContext *> async_;
    TrackBack src_external_;
    TrackBack dst_external_;
};
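// Illustrative sketch of the validate-then-record pattern used against an AccessContext for a buffer access
// (src_buffer, range, tag, and the SYNC_* enumerant are assumptions for the example, not fixed API):
//   auto hazard = context->DetectHazard(src_buffer, SYNC_TRANSFER_TRANSFER_READ, range);
//   if (hazard.hazard != NONE) { /* report a syncval error using hazard.hazard and hazard.tag */ }
//   // ...and in the corresponding Record phase:
//   context->UpdateAccessState(src_buffer, SYNC_TRANSFER_TRANSFER_READ, range, tag);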

class RenderPassAccessContext {
  public:
    RenderPassAccessContext() : rp_state_(nullptr), current_subpass_(0) {}

    bool ValidateDrawSubpassAttachment(const SyncValidator &sync_state, const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area,
                                       const char *func_name) const;
    void RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area, const ResourceUsageTag &tag);
    bool ValidateNextSubpass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *command_name) const;
    bool ValidateEndRenderPass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *func_name) const;
    bool ValidateFinalSubpassLayoutTransitions(const SyncValidator &sync_state, const VkRect2D &render_area,
                                               const char *func_name) const;

    void RecordLayoutTransitions(const ResourceUsageTag &tag);
    void RecordLoadOperations(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordBeginRenderPass(const SyncValidator &state, const CMD_BUFFER_STATE &cb_state, const AccessContext *external_context,
                               VkQueueFlags queue_flags, const ResourceUsageTag &tag);
    void RecordNextSubpass(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordEndRenderPass(AccessContext *external_context, const VkRect2D &render_area, const ResourceUsageTag &tag);

    AccessContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
    const AccessContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
    const std::vector<AccessContext> &GetContexts() const { return subpass_contexts_; }
    uint32_t GetCurrentSubpass() const { return current_subpass_; }
    const RENDER_PASS_STATE *GetRenderPassState() const { return rp_state_; }
    AccessContext *CreateStoreResolveProxy(const VkRect2D &render_area) const;

  private:
    const RENDER_PASS_STATE *rp_state_;
    uint32_t current_subpass_;
    std::vector<AccessContext> subpass_contexts_;
    std::vector<const IMAGE_VIEW_STATE *> attachment_views_;
};

class CommandBufferAccessContext {
  public:
    CommandBufferAccessContext()
        : command_number_(0),
          reset_count_(0),
          render_pass_contexts_(),
          cb_access_context_(),
          current_context_(&cb_access_context_),
          current_renderpass_context_(),
          cb_state_(),
          queue_flags_() {}
    CommandBufferAccessContext(SyncValidator &sync_validator, std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
        : CommandBufferAccessContext() {
        cb_state_ = cb_state;
        sync_state_ = &sync_validator;
        queue_flags_ = queue_flags;
    }

    void Reset() {
        command_number_ = 0;
        reset_count_++;
        cb_access_context_.Reset();
        render_pass_contexts_.clear();
        current_context_ = &cb_access_context_;
        current_renderpass_context_ = nullptr;
    }

    AccessContext *GetCurrentAccessContext() { return current_context_; }
    const AccessContext *GetCurrentAccessContext() const { return current_context_; }
    void RecordBeginRenderPass(const ResourceUsageTag &tag);
    bool ValidateBeginRenderPass(const RENDER_PASS_STATE &render_pass, const VkRenderPassBeginInfo *pRenderPassBegin,
                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const char *func_name) const;
    bool ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const char *func_name) const;
    void RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const ResourceUsageTag &tag);
    bool ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const char *func_name) const;
    void RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const ResourceUsageTag &tag);
    bool ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const char *func_name) const;
    void RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const ResourceUsageTag &tag);
    bool ValidateDrawSubpassAttachment(const char *func_name) const;
    void RecordDrawSubpassAttachment(const ResourceUsageTag &tag);
    bool ValidateNextSubpass(const char *func_name) const;
    bool ValidateEndRenderpass(const char *func_name) const;
    void RecordNextSubpass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);
    void RecordEndRenderPass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);
    CMD_BUFFER_STATE *GetCommandBufferState() { return cb_state_.get(); }
    const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
    VkQueueFlags GetQueueFlags() const { return queue_flags_; }
    inline ResourceUsageTag NextCommandTag(CMD_TYPE command) {
        // TODO: add command encoding to ResourceUsageTag.
        // What else do we want to include? Do we want some sort of "parent" or global sequence number?
        command_number_++;
        const auto index = (static_cast<uint64_t>(reset_count_) << 32) | command_number_;
        ResourceUsageTag next(index, command);
        return next;
    }

  private:
    uint32_t command_number_;
    uint32_t reset_count_;
    std::vector<RenderPassAccessContext> render_pass_contexts_;
    AccessContext cb_access_context_;
    AccessContext *current_context_;
    RenderPassAccessContext *current_renderpass_context_;
    std::shared_ptr<CMD_BUFFER_STATE> cb_state_;
    SyncValidator *sync_state_;

    VkQueueFlags queue_flags_;
};
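// Illustrative sketch of how the SyncValidator hooks below tie these pieces together, written as if inside a
// SyncValidator record hook (the CMD_COPYBUFFER value and the local names are assumptions for the example):
//   auto *cb_context = GetAccessContext(commandBuffer);             // per-command-buffer syncval state
//   auto *access_context = cb_context->GetCurrentAccessContext();   // cb-level or current-subpass context
//   const auto tag = cb_context->NextCommandTag(CMD_COPYBUFFER);    // tag recorded with each access
//   access_context->UpdateAccessState(/* buffer/image state, usage, range, tag... */);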
| 479 | |
| 480 | class SyncValidator : public ValidationStateTracker, public SyncStageAccess { |
| 481 | public: |
| 482 | SyncValidator() { container_type = LayerObjectTypeSyncValidation; } |
| 483 | using StateTracker = ValidationStateTracker; |
| 484 | |
| 485 | using StateTracker::AccessorTraitsTypes; |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 486 | std::unordered_map<VkCommandBuffer, std::unique_ptr<CommandBufferAccessContext>> cb_access_state; |
| 487 | CommandBufferAccessContext *GetAccessContextImpl(VkCommandBuffer command_buffer, bool do_insert) { |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 488 | auto found_it = cb_access_state.find(command_buffer); |
| 489 | if (found_it == cb_access_state.end()) { |
| 490 | if (!do_insert) return nullptr; |
| 491 | // If we don't have one, make it. |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 492 | auto cb_state = GetShared<CMD_BUFFER_STATE>(command_buffer); |
| 493 | assert(cb_state.get()); |
| 494 | auto queue_flags = GetQueueFlags(*cb_state); |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 495 | std::unique_ptr<CommandBufferAccessContext> context(new CommandBufferAccessContext(*this, cb_state, queue_flags)); |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 496 | auto insert_pair = cb_access_state.insert(std::make_pair(command_buffer, std::move(context))); |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 497 | found_it = insert_pair.first; |
| 498 | } |
| 499 | return found_it->second.get(); |
| 500 | } |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 501 | CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) { |
| 502 | return GetAccessContextImpl(command_buffer, true); // true -> do_insert on not found |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 503 | } |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 504 | CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer) { |
| 505 | return GetAccessContextImpl(command_buffer, false); // false -> don't do_insert on not found |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 506 | } |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 507 | |
| 508 | const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const { |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 509 | const auto found_it = cb_access_state.find(command_buffer); |
| 510 | if (found_it == cb_access_state.end()) { |
| 511 | return nullptr; |
| 512 | } |
| 513 | return found_it->second.get(); |
| 514 | } |
| 515 | |
John Zulauf | 540266b | 2020-04-06 18:54:53 -0600 | [diff] [blame] | 516 | void ApplyGlobalBarriers(AccessContext *context, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 517 | SyncStageAccessFlags src_stage_scope, SyncStageAccessFlags dst_stage_scope, |
| 518 | uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers); |
John Zulauf | 540266b | 2020-04-06 18:54:53 -0600 | [diff] [blame] | 519 | void ApplyBufferBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_stage_scope, |
| 520 | VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count, |
| 521 | const VkBufferMemoryBarrier *barriers); |
| 522 | void ApplyImageBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_stage_scope, |
| 523 | VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count, |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 524 | const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag); |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 525 | |
John Zulauf | d1f85d4 | 2020-04-15 12:23:15 -0600 | [diff] [blame] | 526 | void ResetCommandBufferCallback(VkCommandBuffer command_buffer); |
| 527 | void FreeCommandBufferCallback(VkCommandBuffer command_buffer); |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 528 | void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 529 | const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE command); |
| 530 | void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, |
| 531 | |
| 532 | const VkSubpassBeginInfo *pSubpassBeginInfo, const VkSubpassEndInfo *pSubpassEndInfo, |
| 533 | CMD_TYPE command); |
| 534 | void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command); |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 535 | |
| 536 | void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo, |
| 537 | const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result); |
| 538 | |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 539 | bool ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
| 540 | const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const char *func_name) const; |
| 541 | |
| 542 | bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
| 543 | VkSubpassContents contents) const; |
| 544 | |
| 545 | bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
| 546 | const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const; |
| 547 | |
| 548 | bool PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
| 549 | const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const; |
| 550 | |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 551 | bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, |
| 552 | const VkBufferCopy *pRegions) const; |
| 553 | |
| 554 | void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, |
| 555 | const VkBufferCopy *pRegions); |
| 556 | |
John Zulauf | 5c5e88d | 2019-12-26 11:22:02 -0700 | [diff] [blame] | 557 | bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
| 558 | VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, |
| 559 | const VkImageCopy *pRegions) const; |
| 560 | |
| 561 | void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, |
| 562 | VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions); |
| 563 | |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 564 | bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, |
| 565 | VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, |
| 566 | uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, |
| 567 | uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, |
| 568 | uint32_t imageMemoryBarrierCount, |
| 569 | const VkImageMemoryBarrier *pImageMemoryBarriers) const; |
| 570 | |
| 571 | void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, |
| 572 | VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, |
| 573 | uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, |
| 574 | uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, |
| 575 | uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers); |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 576 | |
| 577 | void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo, |
| 578 | VkResult result); |
| 579 | |
| 580 | void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
| 581 | VkSubpassContents contents); |
| 582 | void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
| 583 | const VkSubpassBeginInfo *pSubpassBeginInfo); |
| 584 | void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
| 585 | const VkSubpassBeginInfo *pSubpassBeginInfo); |
| 586 | |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 587 | bool ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo, |
| 588 | const VkSubpassEndInfoKHR *pSubpassEndInfo, const char *func_name) const; |
| 589 | bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const; |
| 590 | bool PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo, |
| 591 | const VkSubpassEndInfoKHR *pSubpassEndInfo) const; |
| 592 | bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo, |
| 593 | const VkSubpassEndInfoKHR *pSubpassEndInfo) const; |
| 594 | |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 595 | void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents); |
| 596 | void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo, |
| 597 | const VkSubpassEndInfo *pSubpassEndInfo); |
| 598 | void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo, |
| 599 | const VkSubpassEndInfo *pSubpassEndInfo); |
| 600 | |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 601 | bool ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo, |
| 602 | const char *func_name) const; |
| 603 | bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const; |
| 604 | bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) const; |
| 605 | bool PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) const; |
| 606 | |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 607 | void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer); |
| 608 | void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo); |
| 609 | void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo); |
locke-lunarg | a19c71d | 2020-03-02 18:17:04 -0700 | [diff] [blame] | 610 | bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, |
| 611 | VkImageLayout dstImageLayout, uint32_t regionCount, |
| 612 | const VkBufferImageCopy *pRegions) const; |
| 613 | |
| 614 | void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, |
| 615 | VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions); |
| 616 | |
| 617 | bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
| 618 | VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) const; |
| 619 | |
| 620 | void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
| 621 | VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions); |
| 622 | |
| 623 | bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
| 624 | VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, |
| 625 | const VkImageBlit *pRegions, VkFilter filter) const; |
| 626 | |
| 627 | void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, |
| 628 | VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions, |
| 629 | VkFilter filter); |
locke-lunarg | 36ba259 | 2020-04-03 09:42:04 -0600 | [diff] [blame] | 630 | |
locke-lunarg | 61870c2 | 2020-06-09 14:51:50 -0600 | [diff] [blame] | 631 | bool ValidateIndirectBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, const VkDeviceSize struct_size, |
| 632 | const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, const uint32_t stride, |
| 633 | const char *function) const; |
| 634 | void RecordIndirectBuffer(AccessContext &context, const ResourceUsageTag &tag, const VkDeviceSize struct_size, |
| 635 | const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, uint32_t stride); |
locke-lunarg | 36ba259 | 2020-04-03 09:42:04 -0600 | [diff] [blame] | 636 | |
locke-lunarg | 61870c2 | 2020-06-09 14:51:50 -0600 | [diff] [blame] | 637 | bool ValidateCountBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 638 | const char *function) const; |
| 639 | void RecordCountBuffer(AccessContext &context, const ResourceUsageTag &tag, VkBuffer buffer, VkDeviceSize offset); |
locke-lunarg | 93d68af | 2020-05-12 17:18:03 -0600 | [diff] [blame] | 640 | |
locke-lunarg | 36ba259 | 2020-04-03 09:42:04 -0600 | [diff] [blame] | 641 | bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const; |
| 642 | void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z); |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 643 | |
| 644 | bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const; |
| 645 | void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); |
| 646 | |
| 647 | bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, |
| 648 | uint32_t firstInstance) const; |
| 649 | void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, |
| 650 | uint32_t firstInstance); |
| 651 | |
| 652 | bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, |
| 653 | uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const; |
| 654 | void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, |
| 655 | uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); |
| 656 | |
| 657 | bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, |
| 658 | uint32_t stride) const; |
| 659 | void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, |
| 660 | uint32_t stride); |
| 661 | |
| 662 | bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 663 | uint32_t drawCount, uint32_t stride) const; |
| 664 | void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 665 | uint32_t drawCount, uint32_t stride); |
| 666 | |
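    // vkCmdDrawIndirectCount and its KHR/AMD aliases funnel into the shared ValidateCmdDrawIndirectCount path,
    // which takes the calling function's name for error reporting.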
locke-lunarg | ff255f9 | 2020-05-13 18:53:52 -0600 | [diff] [blame] | 667 | bool ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, |
| 668 | VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, |
| 669 | const char *function) const; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 670 | bool PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 671 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 672 | uint32_t stride) const; |
| 673 | void PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 674 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 675 | uint32_t stride); |
| 676 | bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 677 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 678 | uint32_t stride) const; |
| 679 | void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 680 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 681 | uint32_t stride); |
| 682 | bool PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 683 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 684 | uint32_t stride) const; |
| 685 | void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 686 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 687 | uint32_t stride); |
| 688 | |
locke-lunarg | ff255f9 | 2020-05-13 18:53:52 -0600 | [diff] [blame] | 689 | bool ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 690 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 691 | uint32_t stride, const char *function) const; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 692 | bool PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 693 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 694 | uint32_t stride) const; |
| 695 | void PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 696 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 697 | uint32_t stride); |
| 698 | bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 699 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 700 | uint32_t stride) const; |
| 701 | void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 702 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 703 | uint32_t stride); |
| 704 | bool PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 705 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 706 | uint32_t stride) const; |
| 707 | void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
| 708 | VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, |
| 709 | uint32_t stride); |
| 710 | |
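    // Clear, query-copy, fill, resolve, update, and marker-write intercepts: these track the buffer and image
    // reads/writes performed by transfer-style commands so later accesses can be hazard-checked against them.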
| 711 | bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, |
| 712 | const VkClearColorValue *pColor, uint32_t rangeCount, |
| 713 | const VkImageSubresourceRange *pRanges) const; |
| 714 | void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, |
| 715 | const VkClearColorValue *pColor, uint32_t rangeCount, |
| 716 | const VkImageSubresourceRange *pRanges); |
| 717 | |
| 718 | bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, |
| 719 | const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, |
| 720 | const VkImageSubresourceRange *pRanges) const; |
| 721 | void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, |
| 722 | const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, |
| 723 | const VkImageSubresourceRange *pRanges); |
| 724 | |
| 725 | bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, |
| 726 | uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, |
| 727 | VkDeviceSize stride, VkQueryResultFlags flags) const; |
| 728 | void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, |
| 729 | uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, |
| 730 | VkQueryResultFlags flags); |
| 731 | |
| 732 | bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, |
| 733 | uint32_t data) const; |
| 734 | void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, |
| 735 | uint32_t data); |
| 736 | |
| 737 | bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
| 738 | VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, |
| 739 | const VkImageResolve *pRegions) const; |
| 740 | void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
| 741 | VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, |
| 742 | const VkImageResolve *pRegions); |
| 743 | |
| 744 | bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, |
| 745 | VkDeviceSize dataSize, const void *pData) const; |
| 746 | void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, |
| 747 | VkDeviceSize dataSize, const void *pData); |
locke-lunarg | ff255f9 | 2020-05-13 18:53:52 -0600 | [diff] [blame] | 748 | |
| 749 | bool PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, |
| 750 | VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const; |
| 751 | void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, |
| 752 | VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 753 | }; |
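// Illustrative sketch only (not part of the original header, and excluded from compilation by the #if 0 guard):
// one plausible shape for a PreCallValidate/PreCallRecord pair declared above, using vkCmdFillBuffer as the example.
// The helpers it leans on (GetAccessContext(), AccessContext::DetectHazard(), AccessContext::UpdateAccessState(),
// MakeRange(), NextCommandTag(), CMD_FILLBUFFER, SYNC_TRANSFER_TRANSFER_WRITE, string_SyncHazard(), and
// string_SyncHazardVUID()) are assumptions for the purpose of this sketch and are not declared in this excerpt.
#if 0
bool SyncValidator::PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                 VkDeviceSize size, uint32_t data) const {
    bool skip = false;
    const auto *cb_context = GetAccessContext(commandBuffer);  // per-command-buffer sync state (assumed accessor)
    const auto *context = cb_context ? cb_context->GetCurrentAccessContext() : nullptr;
    const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
    if (context && dst_buffer) {
        const auto range = MakeRange(dstOffset, size);  // assumed to clamp VK_WHOLE_SIZE to the buffer's remaining size
        const auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
        if (hazard.hazard) {
            // Report the detected WRITE_AFTER_* hazard against the destination buffer range.
            skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard), "vkCmdFillBuffer: Hazard %s for dstBuffer %s.",
                             string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str());
        }
    }
    return skip;
}

void SyncValidator::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                               VkDeviceSize size, uint32_t data) {
    auto *cb_context = GetAccessContext(commandBuffer);
    auto *context = cb_context ? cb_context->GetCurrentAccessContext() : nullptr;
    const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
    if (context && dst_buffer) {
        const auto tag = cb_context->NextCommandTag(CMD_FILLBUFFER);  // tag ties the recorded access to this command
        context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, MakeRange(dstOffset, size), tag);
    }
}
#endif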