blob: 35e3908302aff1b46a14fd9e30c3b47eaf1b87be [file] [log] [blame]
/*
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 */
19
#pragma once

#include <array>
#include <cassert>
#include <limits>
#include <map>
#include <memory>
#include <unordered_map>
#include <vector>

#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"
30
// Forward declarations: SyncValidator is defined elsewhere in the validator
// sources; ResourceAccessState is defined later in this header but is named
// by HazardResult before its definition.
class SyncValidator;
class ResourceAccessState;
John Zulauf355e49b2020-04-24 15:11:15 -060033
// Hazard classifications reported by the synchronization validator.
// The *_AFTER_* values describe ordering violations between accesses in the
// same dependency chain; the *_RACING_* values describe accesses that race
// an access in an unsynchronized (async) context — see DetectAsyncHazard.
enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,    // read not synchronized with a prior write
    WRITE_AFTER_READ,    // write not synchronized with a prior read
    WRITE_AFTER_WRITE,   // write not synchronized with a prior write
    READ_RACING_WRITE,   // read races a write in an unsynchronized context
    WRITE_RACING_WRITE,  // write races a write in an unsynchronized context
    WRITE_RACING_READ,   // write races a read in an unsynchronized context
};
John Zulauf9cb530d2019-09-30 14:14:10 -060043
44// Useful Utilites for manipulating StageAccess parameters, suitable as base class to save typing
45struct SyncStageAccess {
John Zulauf1507ee42020-05-18 11:33:09 -060046 static inline SyncStageAccessFlagBits FlagBit(SyncStageAccessIndex stage_access) {
John Zulauf9cb530d2019-09-30 14:14:10 -060047 return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
48 }
John Zulauf1507ee42020-05-18 11:33:09 -060049 static inline SyncStageAccessFlags Flags(SyncStageAccessIndex stage_access) {
50 return static_cast<SyncStageAccessFlags>(FlagBit(stage_access));
51 }
John Zulauf9cb530d2019-09-30 14:14:10 -060052
53 static bool IsRead(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessReadMask); }
54 static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }
55
56 static bool IsWrite(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessWriteMask); }
John Zulauf1507ee42020-05-18 11:33:09 -060057 static bool HasWrite(SyncStageAccessFlags stage_access_mask) { return 0 != (stage_access_mask & syncStageAccessWriteMask); }
John Zulauf9cb530d2019-09-30 14:14:10 -060058 static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
59 static VkPipelineStageFlagBits PipelineStageBit(SyncStageAccessIndex stage_access_index) {
60 return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
61 }
62 static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags stages);
63 static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags access);
64 static SyncStageAccessFlags AccessScope(VkPipelineStageFlags stages, VkAccessFlags access);
65 static SyncStageAccessFlags AccessScope(SyncStageAccessFlags stage_scope, VkAccessFlags accesses) {
66 return stage_scope & AccessScopeByAccess(accesses);
67 }
68};
69
John Zulauf5f13a792020-03-10 07:31:21 -060070struct ResourceUsageTag {
71 uint64_t index;
John Zulaufcc6fecb2020-06-17 15:24:54 -060072 CMD_TYPE command;
John Zulauf7635de32020-05-29 17:14:15 -060073 const static uint64_t kMaxIndex = std::numeric_limits<uint64_t>::max();
John Zulauf5f13a792020-03-10 07:31:21 -060074 ResourceUsageTag &operator++() {
75 index++;
76 return *this;
77 }
78 bool IsBefore(const ResourceUsageTag &rhs) const { return index < rhs.index; }
John Zulaufe5da6e52020-03-18 15:32:18 -060079 bool operator==(const ResourceUsageTag &rhs) const { return (index == rhs.index); }
80 bool operator!=(const ResourceUsageTag &rhs) const { return !(*this == rhs); }
John Zulaufcc6fecb2020-06-17 15:24:54 -060081 ResourceUsageTag() : index(0), command(CMD_NONE) {}
82 ResourceUsageTag(uint64_t index_, CMD_TYPE command_) : index(index_), command(command_) {}
John Zulauf5f13a792020-03-10 07:31:21 -060083};
84
// Result of a hazard-detection query. 'hazard' is NONE when no conflict was
// found; otherwise the remaining fields describe the conflicting prior access.
struct HazardResult {
    // Owned snapshot of the conflicting access state; populated by Set()
    // (defined in the .cpp — presumably a copy of *access_state_; verify there).
    std::unique_ptr<const ResourceAccessState> access_state;
    // The usage being validated when the hazard was found; max() == unset.
    SyncStageAccessIndex usage_index = std::numeric_limits<SyncStageAccessIndex>::max();
    SyncHazard hazard = NONE;
    SyncStageAccessFlags prior_access = 0U;  // TODO -- change to a NONE enum in ...Bits
    ResourceUsageTag tag = ResourceUsageTag();  // tag of the prior (conflicting) access
    void Set(const ResourceAccessState *access_state_, SyncStageAccessIndex usage_index_, SyncHazard hazard_,
             SyncStageAccessFlags prior_, const ResourceUsageTag &tag_);
};
94
John Zulauf3d84f1b2020-03-09 13:33:25 -060095struct SyncBarrier {
96 VkPipelineStageFlags src_exec_scope;
97 SyncStageAccessFlags src_access_scope;
98 VkPipelineStageFlags dst_exec_scope;
99 SyncStageAccessFlags dst_access_scope;
100 SyncBarrier() = default;
101 SyncBarrier &operator=(const SyncBarrier &) = default;
102 SyncBarrier(VkQueueFlags gueue_flags, const VkSubpassDependency2 &sub_pass_barrier);
103};
John Zulauf69133422020-05-20 14:55:53 -0600104
// To represent ordering guarantees such as rasterization and store — an
// implicit execution/access scope pair with no explicit source scope.
struct SyncOrderingBarrier {
    VkPipelineStageFlags exec_scope;    // stages covered by the implicit ordering
    SyncStageAccessFlags access_scope;  // accesses covered by the implicit ordering
    SyncOrderingBarrier() = default;
    SyncOrderingBarrier &operator=(const SyncOrderingBarrier &) = default;
};
John Zulauf3d84f1b2020-03-09 13:33:25 -0600112
John Zulauf9cb530d2019-09-30 14:14:10 -0600113class ResourceAccessState : public SyncStageAccess {
114 protected:
115 // Mutliple read operations can be simlutaneously (and independently) synchronized,
116 // given the only the second execution scope creates a dependency chain, we have to track each,
117 // but only up to one per pipeline stage (as another read from the *same* stage become more recent,
118 // and applicable one for hazard detection
119 struct ReadState {
120 VkPipelineStageFlagBits stage; // The stage of this read
John Zulauf37ceaed2020-07-03 16:18:15 -0600121 SyncStageAccessFlags access; // TODO: Change to FlagBits when we have a None bit enum
John Zulauf9cb530d2019-09-30 14:14:10 -0600122 VkPipelineStageFlags barriers; // all applicable barriered stages
123 ResourceUsageTag tag;
John Zulaufe5da6e52020-03-18 15:32:18 -0600124 bool operator==(const ReadState &rhs) const {
John Zulauf37ceaed2020-07-03 16:18:15 -0600125 bool same = (stage == rhs.stage) && (access == rhs.access) && (barriers == rhs.barriers) && (tag == rhs.tag);
John Zulaufe5da6e52020-03-18 15:32:18 -0600126 return same;
127 }
128 bool operator!=(const ReadState &rhs) const { return !(*this == rhs); }
John Zulauf9cb530d2019-09-30 14:14:10 -0600129 };
130
131 public:
132 HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;
John Zulauf69133422020-05-20 14:55:53 -0600133 HazardResult DetectHazard(SyncStageAccessIndex usage_index, const SyncOrderingBarrier &ordering) const;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600134
John Zulaufc9201222020-05-13 15:13:03 -0600135 HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags source_exec_scope,
136 SyncStageAccessFlags source_access_scope) const;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600137 HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index) const;
138
John Zulauf9cb530d2019-09-30 14:14:10 -0600139 void Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag);
John Zulauf5f13a792020-03-10 07:31:21 -0600140 void Resolve(const ResourceAccessState &other);
141 void ApplyBarrier(const SyncBarrier &barrier);
John Zulauf9cb530d2019-09-30 14:14:10 -0600142 void ApplyExecutionBarrier(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask);
143 void ApplyMemoryAccessBarrier(VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_scope,
144 VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_scope);
145
146 ResourceAccessState()
John Zulauf355e49b2020-04-24 15:11:15 -0600147 : write_barriers(~SyncStageAccessFlags(0)),
148 write_dependency_chain(0),
John Zulauf355e49b2020-04-24 15:11:15 -0600149 write_tag(),
John Zulaufd14743a2020-07-03 09:42:39 -0600150 last_write(0),
151 input_attachment_barriers(kNoAttachmentRead),
152 input_attachment_tag(),
153 last_read_count(0),
154 last_read_stages(0) {}
John Zulauf9cb530d2019-09-30 14:14:10 -0600155
John Zulauf3d84f1b2020-03-09 13:33:25 -0600156 bool HasWriteOp() const { return last_write != 0; }
John Zulaufe5da6e52020-03-18 15:32:18 -0600157 bool operator==(const ResourceAccessState &rhs) const {
158 bool same = (write_barriers == rhs.write_barriers) && (write_dependency_chain == rhs.write_dependency_chain) &&
159 (last_read_count == rhs.last_read_count) && (last_read_stages == rhs.last_read_stages) &&
John Zulaufd14743a2020-07-03 09:42:39 -0600160 (write_tag == rhs.write_tag) && (input_attachment_barriers == rhs.input_attachment_barriers) &&
161 ((input_attachment_barriers == kNoAttachmentRead) || input_attachment_tag == rhs.input_attachment_tag);
John Zulaufe5da6e52020-03-18 15:32:18 -0600162 for (uint32_t i = 0; same && i < last_read_count; i++) {
163 same |= last_reads[i] == rhs.last_reads[i];
164 }
165 return same;
166 }
167 bool operator!=(const ResourceAccessState &rhs) const { return !(*this == rhs); }
John Zulauf59e25072020-07-17 10:55:21 -0600168 VkPipelineStageFlags GetReadBarriers(SyncStageAccessFlags usage) const;
169 SyncStageAccessFlags GetWriteBarriers() const { return write_barriers; }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600170
John Zulauf9cb530d2019-09-30 14:14:10 -0600171 private:
John Zulaufd14743a2020-07-03 09:42:39 -0600172 static constexpr VkPipelineStageFlags kNoAttachmentRead = ~VkPipelineStageFlags(0);
John Zulauf9cb530d2019-09-30 14:14:10 -0600173 bool IsWriteHazard(SyncStageAccessFlagBits usage) const { return 0 != (usage & ~write_barriers); }
John Zulaufd14743a2020-07-03 09:42:39 -0600174
175 static bool IsReadHazard(VkPipelineStageFlagBits stage, const VkPipelineStageFlags barriers) {
176 return 0 != (stage & ~barriers);
177 }
178 static bool IsReadHazard(VkPipelineStageFlags stage_mask, const VkPipelineStageFlags barriers) {
179 return stage_mask != (stage_mask & barriers);
180 }
181
John Zulauf9cb530d2019-09-30 14:14:10 -0600182 bool IsReadHazard(VkPipelineStageFlagBits stage, const ReadState &read_access) const {
John Zulaufd14743a2020-07-03 09:42:39 -0600183 return IsReadHazard(stage, read_access.barriers);
John Zulauf9cb530d2019-09-30 14:14:10 -0600184 }
John Zulauf0cb5be22020-01-23 12:18:22 -0700185 bool IsReadHazard(VkPipelineStageFlags stage_mask, const ReadState &read_access) const {
John Zulaufd14743a2020-07-03 09:42:39 -0600186 return IsReadHazard(stage_mask, read_access.barriers);
John Zulauf0cb5be22020-01-23 12:18:22 -0700187 }
John Zulaufd14743a2020-07-03 09:42:39 -0600188
189 // TODO: Add a NONE (zero) enum to SyncStageAccessFlagBits for input_attachment_read and last_write
190
John Zulauf9cb530d2019-09-30 14:14:10 -0600191 // With reads, each must be "safe" relative to it's prior write, so we need only
192 // save the most recent write operation (as anything *transitively* unsafe would arleady
193 // be included
194 SyncStageAccessFlags write_barriers; // union of applicable barrier masks since last write
195 VkPipelineStageFlags write_dependency_chain; // intiially zero, but accumulating the dstStages of barriers if they chain.
John Zulauf9cb530d2019-09-30 14:14:10 -0600196 ResourceUsageTag write_tag;
John Zulauf355e49b2020-04-24 15:11:15 -0600197 SyncStageAccessFlags last_write; // only the most recent write
John Zulauf9cb530d2019-09-30 14:14:10 -0600198
John Zulaufd14743a2020-07-03 09:42:39 -0600199 // This is special as it's a framebuffer-local read from a framebuffer-global pipeline stage
200 // As the only possible state for the input attachment stage/access is SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ_BIT,
201 // encode the presence with the barriers mask, ~0 denotes no pending input attachment. Zero -- is the no-barrier state,
202 // otherwise reflects the barrier/dependency chain information.
203 VkPipelineStageFlags input_attachment_barriers;
204 ResourceUsageTag input_attachment_tag;
205
206 uint32_t last_read_count;
207 VkPipelineStageFlags last_read_stages;
208 static constexpr size_t kStageCount = 32; // TODO: The manual count was 28 real stages. Add stage count to codegen
209 std::array<ReadState, kStageCount> last_reads;
John Zulauf9cb530d2019-09-30 14:14:10 -0600210};
211
// Access state is tracked per device-address range via a sparse range map;
// ResourceAccessRange is that map's key (range) type.
using ResourceAccessRangeMap = sparse_container::range_map<VkDeviceSize, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;
// Parallel iteration over a mutable and a const range map — presumably used
// when merging/resolving contexts; see the Resolve* members below.
using ResourceRangeMergeIterator = sparse_container::parallel_iterator<ResourceAccessRangeMap, const ResourceAccessRangeMap>;
John Zulauf9cb530d2019-09-30 14:14:10 -0600215
John Zulauf540266b2020-04-06 18:54:53 -0600216class AccessContext {
John Zulauf5c5e88d2019-12-26 11:22:02 -0700217 public:
John Zulauf355e49b2020-04-24 15:11:15 -0600218 enum AddressType : int { kLinearAddress = 0, kIdealizedAddress = 1, kMaxAddressType = 1 };
John Zulauf69133422020-05-20 14:55:53 -0600219 enum DetectOptions : uint32_t {
John Zulauf355e49b2020-04-24 15:11:15 -0600220 kDetectPrevious = 1U << 0,
221 kDetectAsync = 1U << 1,
222 kDetectAll = (kDetectPrevious | kDetectAsync)
John Zulauf16adfc92020-04-08 10:28:33 -0600223 };
224
John Zulauf3d84f1b2020-03-09 13:33:25 -0600225 struct TrackBack {
226 SyncBarrier barrier;
John Zulauf1a224292020-06-30 14:52:13 -0600227 const AccessContext *context;
228 TrackBack(const AccessContext *context_, VkQueueFlags queue_flags_, const VkSubpassDependency2 &subpass_barrier_)
John Zulauf3d84f1b2020-03-09 13:33:25 -0600229 : barrier(queue_flags_, subpass_barrier_), context(context_) {}
230 TrackBack &operator=(const TrackBack &) = default;
231 TrackBack() = default;
232 };
John Zulauf5c5e88d2019-12-26 11:22:02 -0700233
John Zulauf355e49b2020-04-24 15:11:15 -0600234 HazardResult DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
John Zulauf540266b2020-04-06 18:54:53 -0600235 HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
John Zulauf3d84f1b2020-03-09 13:33:25 -0600236 const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
237 const VkExtent3D &extent) const;
John Zulauf69133422020-05-20 14:55:53 -0600238 template <typename Detector>
239 HazardResult DetectHazard(Detector &detector, const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
240 const VkOffset3D &offset, const VkExtent3D &extent, DetectOptions options) const;
John Zulauf1507ee42020-05-18 11:33:09 -0600241 HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
242 const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
243 const VkExtent3D &extent) const;
John Zulauf69133422020-05-20 14:55:53 -0600244 HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
245 const VkImageSubresourceRange &subresource_range, const SyncOrderingBarrier &ordering,
246 const VkOffset3D &offset, const VkExtent3D &extent) const;
John Zulaufb027cdb2020-05-21 14:25:22 -0600247 HazardResult DetectHazard(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
248 const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask = 0U) const;
John Zulauf355e49b2020-04-24 15:11:15 -0600249 HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
250 SyncStageAccessFlags src_access_scope, const VkImageSubresourceRange &subresource_range,
251 DetectOptions options) const;
252 HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
253 SyncStageAccessFlags src_stage_accesses, const VkImageMemoryBarrier &barrier) const;
John Zulauf7635de32020-05-29 17:14:15 -0600254 HazardResult DetectSubpassTransitionHazard(const TrackBack &track_back, const IMAGE_VIEW_STATE *attach_view) const;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600255
John Zulaufe5da6e52020-03-18 15:32:18 -0600256 const TrackBack &GetDstExternalTrackBack() const { return dst_external_; }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600257 void Reset() {
John Zulauf3d84f1b2020-03-09 13:33:25 -0600258 prev_.clear();
John Zulauf355e49b2020-04-24 15:11:15 -0600259 prev_by_subpass_.clear();
John Zulauf3d84f1b2020-03-09 13:33:25 -0600260 async_.clear();
John Zulaufe5da6e52020-03-18 15:32:18 -0600261 src_external_ = TrackBack();
John Zulauf16adfc92020-04-08 10:28:33 -0600262 for (auto &map : access_state_maps_) {
263 map.clear();
264 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600265 }
John Zulauf5f13a792020-03-10 07:31:21 -0600266 // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead
267 // Would need to add a "hint" overload to parallel_iterator::invalidate_[AB] call, if so.
John Zulauf355e49b2020-04-24 15:11:15 -0600268 void ResolvePreviousAccess(AddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
269 const ResourceAccessState *infill_state) const;
John Zulauf540266b2020-04-06 18:54:53 -0600270 void ResolvePreviousAccess(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range,
John Zulauf355e49b2020-04-24 15:11:15 -0600271 AddressType address_type, ResourceAccessRangeMap *descent_map,
272 const ResourceAccessState *infill_state) const;
273 void ResolveAccessRange(AddressType type, const ResourceAccessRange &range, const SyncBarrier *barrier,
274 ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
275 bool recur_to_infill = true) const;
276 void UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
277 const ResourceUsageTag &tag);
278 void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
279 const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset, const VkExtent3D &extent,
280 const ResourceUsageTag &tag);
John Zulauf7635de32020-05-29 17:14:15 -0600281 void UpdateAccessState(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const VkOffset3D &offset,
282 const VkExtent3D &extent, VkImageAspectFlags aspect_mask, const ResourceUsageTag &tag);
John Zulauf540266b2020-04-06 18:54:53 -0600283 void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
John Zulauf3d84f1b2020-03-09 13:33:25 -0600284 const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
285 const ResourceUsageTag &tag);
John Zulauf7635de32020-05-29 17:14:15 -0600286 void UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
287 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
288 const ResourceUsageTag &tag);
John Zulaufaff20662020-06-01 14:07:58 -0600289 void UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
290 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
291 const ResourceUsageTag &tag);
John Zulauf3d84f1b2020-03-09 13:33:25 -0600292
John Zulauf540266b2020-04-06 18:54:53 -0600293 void ResolveChildContexts(const std::vector<AccessContext> &contexts);
John Zulauf3d84f1b2020-03-09 13:33:25 -0600294
John Zulauf355e49b2020-04-24 15:11:15 -0600295 void ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
296 VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags dst_accesse_scope,
297 const VkImageSubresourceRange &subresource_range);
298
299 void ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
300 VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags dst_access_scope,
301 const VkImageSubresourceRange &subresource_range, bool layout_transition, const ResourceUsageTag &tag);
302 void ApplyImageBarrier(const IMAGE_STATE &image, const SyncBarrier &barrier, const VkImageSubresourceRange &subresource_range,
303 bool layout_transition, const ResourceUsageTag &tag);
304
John Zulauf540266b2020-04-06 18:54:53 -0600305 template <typename Action>
306 void UpdateMemoryAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action);
307 template <typename Action>
308 void UpdateMemoryAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range, const Action action);
309
310 template <typename Action>
311 void ApplyGlobalBarriers(const Action &barrier_action);
312
John Zulauf16adfc92020-04-08 10:28:33 -0600313 static AddressType ImageAddressType(const IMAGE_STATE &image);
John Zulauf355e49b2020-04-24 15:11:15 -0600314 static VkDeviceSize ResourceBaseAddress(const BINDABLE &bindable);
John Zulauf16adfc92020-04-08 10:28:33 -0600315
John Zulauf540266b2020-04-06 18:54:53 -0600316 AccessContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
John Zulauf1a224292020-06-30 14:52:13 -0600317 const std::vector<AccessContext> &contexts, const AccessContext *external_context);
John Zulauf540266b2020-04-06 18:54:53 -0600318
319 AccessContext() { Reset(); }
John Zulauf7635de32020-05-29 17:14:15 -0600320 AccessContext(const AccessContext &copy_from) = default;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600321
John Zulauf16adfc92020-04-08 10:28:33 -0600322 ResourceAccessRangeMap &GetAccessStateMap(AddressType type) { return access_state_maps_[type]; }
323 const ResourceAccessRangeMap &GetAccessStateMap(AddressType type) const { return access_state_maps_[type]; }
324 ResourceAccessRangeMap &GetLinearMap() { return GetAccessStateMap(AddressType::kLinearAddress); }
325 const ResourceAccessRangeMap &GetLinearMap() const { return GetAccessStateMap(AddressType::kLinearAddress); }
326 ResourceAccessRangeMap &GetIdealizedMap() { return GetAccessStateMap(AddressType::kIdealizedAddress); }
327 const ResourceAccessRangeMap &GetIdealizedMap() const { return GetAccessStateMap(AddressType::kIdealizedAddress); }
John Zulauf355e49b2020-04-24 15:11:15 -0600328 const TrackBack *GetTrackBackFromSubpass(uint32_t subpass) const {
329 if (subpass == VK_SUBPASS_EXTERNAL) {
330 return &src_external_;
331 } else {
332 assert(subpass < prev_by_subpass_.size());
333 return prev_by_subpass_[subpass];
334 }
335 }
John Zulauf16adfc92020-04-08 10:28:33 -0600336
John Zulauf7635de32020-05-29 17:14:15 -0600337 bool ValidateLayoutTransitions(const SyncValidator &sync_state,
338
339 const RENDER_PASS_STATE &rp_state,
340
341 const VkRect2D &render_area,
342
343 uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
344 const char *func_name) const;
John Zulauf1507ee42020-05-18 11:33:09 -0600345 bool ValidateLoadOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
John Zulauf7635de32020-05-29 17:14:15 -0600346 uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
347 const char *func_name) const;
John Zulaufaff20662020-06-01 14:07:58 -0600348 bool ValidateStoreOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
349 uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
350 const char *func_name) const;
John Zulaufb027cdb2020-05-21 14:25:22 -0600351 bool ValidateResolveOperations(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
352 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
353 uint32_t subpass) const;
John Zulauf1507ee42020-05-18 11:33:09 -0600354
John Zulauf3d84f1b2020-03-09 13:33:25 -0600355 private:
John Zulauf355e49b2020-04-24 15:11:15 -0600356 HazardResult DetectHazard(AddressType type, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
357 HazardResult DetectBarrierHazard(AddressType type, SyncStageAccessIndex current_usage, VkPipelineStageFlags src_exec_scope,
358 SyncStageAccessFlags src_access_scope, const ResourceAccessRange &range,
359 DetectOptions options) const;
John Zulauf16adfc92020-04-08 10:28:33 -0600360
John Zulauf3d84f1b2020-03-09 13:33:25 -0600361 template <typename Detector>
John Zulauf355e49b2020-04-24 15:11:15 -0600362 HazardResult DetectHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range,
363 DetectOptions options) const;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600364 template <typename Detector>
John Zulauf355e49b2020-04-24 15:11:15 -0600365 HazardResult DetectAsyncHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const;
John Zulauf5f13a792020-03-10 07:31:21 -0600366 template <typename Detector>
John Zulauf355e49b2020-04-24 15:11:15 -0600367 HazardResult DetectPreviousHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const;
John Zulauf16adfc92020-04-08 10:28:33 -0600368 void UpdateAccessState(AddressType type, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
369 const ResourceUsageTag &tag);
370 constexpr static int kAddressTypeCount = AddressType::kMaxAddressType + 1;
371 static const std::array<AddressType, kAddressTypeCount> kAddressTypes;
372 std::array<ResourceAccessRangeMap, kAddressTypeCount> access_state_maps_;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600373 std::vector<TrackBack> prev_;
John Zulauf355e49b2020-04-24 15:11:15 -0600374 std::vector<TrackBack *> prev_by_subpass_;
John Zulauf540266b2020-04-06 18:54:53 -0600375 std::vector<AccessContext *> async_;
John Zulaufe5da6e52020-03-18 15:32:18 -0600376 TrackBack src_external_;
377 TrackBack dst_external_;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600378};
379
// Access tracking for one render pass instance: one AccessContext per
// subpass, plus the attachment views and the record/validate operations for
// begin/next/end and load/store/resolve/layout-transition handling.
class RenderPassAccessContext {
  public:
    RenderPassAccessContext() : rp_state_(nullptr), current_subpass_(0) {}

    bool ValidateDrawSubpassAttachment(const SyncValidator &sync_state, const CMD_BUFFER_STATE &cmd,
                                       const VkRect2D &render_area, const char *func_name) const;
    void RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area, const ResourceUsageTag &tag);
    bool ValidateNextSubpass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *command_name) const;
    bool ValidateEndRenderPass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *func_name) const;
    bool ValidateFinalSubpassLayoutTransitions(const SyncValidator &sync_state, const VkRect2D &render_area,
                                               const char *func_name) const;

    void RecordLayoutTransitions(const ResourceUsageTag &tag);
    void RecordLoadOperations(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordBeginRenderPass(const SyncValidator &state, const CMD_BUFFER_STATE &cb_state,
                               const AccessContext *external_context, VkQueueFlags queue_flags, const ResourceUsageTag &tag);
    void RecordNextSubpass(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordEndRenderPass(AccessContext *external_context, const VkRect2D &render_area, const ResourceUsageTag &tag);

    // Context of the subpass currently being recorded.
    AccessContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
    const AccessContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
    const std::vector<AccessContext> &GetContexts() const { return subpass_contexts_; }
    uint32_t GetCurrentSubpass() const { return current_subpass_; }
    const RENDER_PASS_STATE *GetRenderPassState() const { return rp_state_; }
    // Returns a newly allocated proxy context — caller takes ownership
    // (presumably; defined in the .cpp, verify there).
    AccessContext *CreateStoreResolveProxy(const VkRect2D &render_area) const;

  private:
    const RENDER_PASS_STATE *rp_state_;  // non-owning; render pass being tracked
    uint32_t current_subpass_;
    std::vector<AccessContext> subpass_contexts_;            // one per subpass
    std::vector<const IMAGE_VIEW_STATE *> attachment_views_; // non-owning attachment views
};
412
413class CommandBufferAccessContext {
414 public:
415 CommandBufferAccessContext()
John Zulauf355e49b2020-04-24 15:11:15 -0600416 : command_number_(0),
417 reset_count_(0),
418 render_pass_contexts_(),
419 cb_access_context_(),
420 current_context_(&cb_access_context_),
John Zulauf3d84f1b2020-03-09 13:33:25 -0600421 current_renderpass_context_(),
422 cb_state_(),
423 queue_flags_() {}
John Zulauf355e49b2020-04-24 15:11:15 -0600424 CommandBufferAccessContext(SyncValidator &sync_validator, std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
John Zulauf3d84f1b2020-03-09 13:33:25 -0600425 : CommandBufferAccessContext() {
426 cb_state_ = cb_state;
John Zulauf355e49b2020-04-24 15:11:15 -0600427 sync_state_ = &sync_validator;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600428 queue_flags_ = queue_flags;
429 }
John Zulauf5c5e88d2019-12-26 11:22:02 -0700430
431 void Reset() {
John Zulauf355e49b2020-04-24 15:11:15 -0600432 command_number_ = 0;
433 reset_count_++;
434 cb_access_context_.Reset();
John Zulauf3d84f1b2020-03-09 13:33:25 -0600435 render_pass_contexts_.clear();
John Zulauf355e49b2020-04-24 15:11:15 -0600436 current_context_ = &cb_access_context_;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600437 current_renderpass_context_ = nullptr;
John Zulauf5c5e88d2019-12-26 11:22:02 -0700438 }
439
    // The context currently accumulating accesses: the command-buffer-level
    // context by default, presumably swapped to the active subpass context
    // while inside a render pass instance (the swap lives in the .cpp --
    // confirm against RecordBeginRenderPass/RecordNextSubpass).
    AccessContext *GetCurrentAccessContext() { return current_context_; }
    const AccessContext *GetCurrentAccessContext() const { return current_context_; }
    // Render pass lifetime and draw/dispatch helpers.  By layer convention the
    // Validate* functions are const hazard checks and the Record* functions
    // update the tracked access state at the given usage tag; the actual
    // semantics are defined in the .cpp.
    void RecordBeginRenderPass(const ResourceUsageTag &tag);
    bool ValidateBeginRenderPass(const RENDER_PASS_STATE &render_pass, const VkRenderPassBeginInfo *pRenderPassBegin,
                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const char *func_name) const;
    bool ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const char *func_name) const;
    void RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const ResourceUsageTag &tag);
    bool ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const char *func_name) const;
    void RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const ResourceUsageTag &tag);
    bool ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const char *func_name) const;
    void RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const ResourceUsageTag &tag);
    bool ValidateDrawSubpassAttachment(const char *func_name) const;
    void RecordDrawSubpassAttachment(const ResourceUsageTag &tag);
    bool ValidateNextSubpass(const char *func_name) const;
    bool ValidateEndRenderpass(const char *func_name) const;
    void RecordNextSubpass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);
    void RecordEndRenderPass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);
    // Simple accessors for the objects this context was bound to at construction.
    CMD_BUFFER_STATE *GetCommandBufferState() { return cb_state_.get(); }
    const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
    VkQueueFlags GetQueueFlags() const { return queue_flags_; }
John Zulauf355e49b2020-04-24 15:11:15 -0600460 inline ResourceUsageTag NextCommandTag(CMD_TYPE command) {
461 // TODO: add command encoding to ResourceUsageTag.
462 // What else we what to include. Do we want some sort of "parent" or global sequence number
463 command_number_++;
John Zulaufcc6fecb2020-06-17 15:24:54 -0600464 const auto index = (static_cast<uint64_t>(reset_count_) << 32) | command_number_;
465 ResourceUsageTag next(index, command);
John Zulauf355e49b2020-04-24 15:11:15 -0600466 return next;
467 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600468
  private:
    uint32_t command_number_;  // Ordinal of the last command tagged in the current reset generation
    uint32_t reset_count_;     // Incremented by Reset(); forms the high bits of every tag index
    std::vector<RenderPassAccessContext> render_pass_contexts_;  // Render pass instances recorded; cleared on Reset()
    AccessContext cb_access_context_;  // Command-buffer-level access state; current_context_ starts here
    AccessContext *current_context_;   // Context receiving accesses now (&cb_access_context_ after Reset())
    RenderPassAccessContext *current_renderpass_context_;  // Null outside a render pass (nulled by Reset())
    std::shared_ptr<CMD_BUFFER_STATE> cb_state_;  // Shared ownership of the tracked command buffer state
    SyncValidator *sync_state_;                   // Non-owning back pointer to the validator that created us

    VkQueueFlags queue_flags_;  // Queue capability flags captured at construction
};
481
// Core validation object for synchronization (hazard) validation.  Extends the
// shared ValidationStateTracker and intercepts command buffer recording calls
// in PreCallValidate / PreCallRecord / PostCallRecord pairs, keeping one
// CommandBufferAccessContext of recorded-access history per command buffer.
// NOTE: the PreCall*/PostCall* signatures below must exactly match the
// state-tracker/chassis dispatch -- do not rename or re-type them here.
class SyncValidator : public ValidationStateTracker, public SyncStageAccess {
  public:
    SyncValidator() { container_type = LayerObjectTypeSyncValidation; }
    using StateTracker = ValidationStateTracker;

    using StateTracker::AccessorTraitsTypes;
    // Per-command-buffer synchronization tracking state, created on demand.
    std::unordered_map<VkCommandBuffer, std::unique_ptr<CommandBufferAccessContext>> cb_access_state;
    // Shared lookup.  Returns the access context for command_buffer; when
    // do_insert is true a missing entry is created from the tracked
    // CMD_BUFFER_STATE, otherwise nullptr is returned for unknown buffers.
    CommandBufferAccessContext *GetAccessContextImpl(VkCommandBuffer command_buffer, bool do_insert) {
        auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            if (!do_insert) return nullptr;
            // If we don't have one, make it.
            auto cb_state = GetShared<CMD_BUFFER_STATE>(command_buffer);
            assert(cb_state.get());
            auto queue_flags = GetQueueFlags(*cb_state);
            std::unique_ptr<CommandBufferAccessContext> context(new CommandBufferAccessContext(*this, cb_state, queue_flags));
            auto insert_pair = cb_access_state.insert(std::make_pair(command_buffer, std::move(context)));
            found_it = insert_pair.first;
        }
        return found_it->second.get();
    }
    CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, true);  // true -> do_insert on not found
    }
    CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, false);  // false -> don't do_insert on not found
    }

    // Const lookup never inserts; returns nullptr when no context exists yet.
    const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const {
        const auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            return nullptr;
        }
        return found_it->second.get();
    }

    // Apply the effects of a pipeline-barrier payload to the access state in
    // *context, using precomputed stage/access scopes for both barrier halves.
    void ApplyGlobalBarriers(AccessContext *context, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                             SyncStageAccessFlags src_stage_scope, SyncStageAccessFlags dst_stage_scope,
                             uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers);
    void ApplyBufferBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_stage_scope,
                             VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count,
                             const VkBufferMemoryBarrier *barriers);
    void ApplyImageBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_stage_scope,
                            VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count,
                            const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag);

    // Lifetime callbacks plus shared Record implementations used by the 1.0,
    // core 1.2, and KHR render pass entry-point variants further below.
    void ResetCommandBufferCallback(VkCommandBuffer command_buffer);
    void FreeCommandBufferCallback(VkCommandBuffer command_buffer);
    void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                  const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE command);
    void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                              const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command);
    void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command);
    // Whether a WAW hazard on a bound descriptor should not be reported.
    // (The "Supressed" spelling (sic) must match the .cpp definition.)
    bool SupressedBoundDescriptorWAW(const HazardResult &hazard) const;

    void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                    const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result);

    // Shared validation body for all vkCmdBeginRenderPass* variants.
    bool ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const char *func_name) const;

    bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                           VkSubpassContents contents) const;

    bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                               const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const;

    bool PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                            const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const;

    bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                      const VkBufferCopy *pRegions) const;

    void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                    const VkBufferCopy *pRegions);

    bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageCopy *pRegions) const;

    void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions);

    bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                           VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                           uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                           uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                           uint32_t imageMemoryBarrierCount,
                                           const VkImageMemoryBarrier *pImageMemoryBarriers) const;

    void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                         VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                         uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                         uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                         uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers);

    void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
                                          VkResult result);

    void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                          VkSubpassContents contents);
    void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                           const VkSubpassBeginInfo *pSubpassBeginInfo);
    void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                              const VkSubpassBeginInfo *pSubpassBeginInfo);

    // Shared validation body for all vkCmdNextSubpass* variants.
    bool ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                const VkSubpassEndInfoKHR *pSubpassEndInfo, const char *func_name) const;
    bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const;
    bool PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                        const VkSubpassEndInfoKHR *pSubpassEndInfo) const;
    bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) const;

    void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
    void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                       const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                          const VkSubpassEndInfo *pSubpassEndInfo);

    // Shared validation body for all vkCmdEndRenderPass* variants.
    bool ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo,
                                  const char *func_name) const;
    bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const;
    bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) const;
    bool PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) const;

    void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer);
    void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);
    bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                             VkImageLayout dstImageLayout, uint32_t regionCount,
                                             const VkBufferImageCopy *pRegions) const;

    void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                           VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions);

    bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                             VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) const;

    void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                           VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions);

    bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageBlit *pRegions, VkFilter filter) const;

    void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions,
                                   VkFilter filter);

    // Helpers for the buffer reads performed by indirect and indirect-count
    // draw/dispatch commands (argument buffer and count buffer).
    bool ValidateIndirectBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, const VkDeviceSize struct_size,
                                const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, const uint32_t stride,
                                const char *function) const;
    void RecordIndirectBuffer(AccessContext &context, const ResourceUsageTag &tag, const VkDeviceSize struct_size,
                              const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, uint32_t stride);

    bool ValidateCountBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                             const char *function) const;
    void RecordCountBuffer(AccessContext &context, const ResourceUsageTag &tag, VkBuffer buffer, VkDeviceSize offset);

    bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const;
    void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z);

    bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const;
    void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);

    bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
                                uint32_t firstInstance) const;
    void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
                              uint32_t firstInstance);

    bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
                                       uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const;
    void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
                                     uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);

    bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
                                        uint32_t stride) const;
    void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
                                      uint32_t stride);

    bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                               uint32_t drawCount, uint32_t stride) const;
    void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             uint32_t drawCount, uint32_t stride);

    // Shared validation body for the core/KHR/AMD DrawIndirectCount variants.
    bool ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer,
                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride,
                                      const char *function) const;
    bool PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                             uint32_t stride) const;
    void PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                           VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                           uint32_t stride);
    bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                uint32_t stride) const;
    void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                              VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                              uint32_t stride);
    bool PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                uint32_t stride) const;
    void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                              VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                              uint32_t stride);

    // Shared validation body for the core/KHR/AMD DrawIndexedIndirectCount variants.
    bool ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                             uint32_t stride, const char *function) const;
    bool PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                    VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                    uint32_t stride) const;
    void PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                  VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                  uint32_t stride);
    bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                       VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                       uint32_t stride) const;
    void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                     VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                     uint32_t stride);
    bool PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                       VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                       uint32_t stride) const;
    void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                     VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                     uint32_t stride);

    bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                           const VkClearColorValue *pColor, uint32_t rangeCount,
                                           const VkImageSubresourceRange *pRanges) const;
    void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                         const VkClearColorValue *pColor, uint32_t rangeCount,
                                         const VkImageSubresourceRange *pRanges);

    bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                                  const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
                                                  const VkImageSubresourceRange *pRanges) const;
    void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                                const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
                                                const VkImageSubresourceRange *pRanges);

    bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
                                                uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                VkDeviceSize stride, VkQueryResultFlags flags) const;
    void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
                                              uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
                                              VkQueryResultFlags flags);

    bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
                                      uint32_t data) const;
    void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
                                    uint32_t data);

    bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                        VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                        const VkImageResolve *pRegions) const;
    void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                      VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                      const VkImageResolve *pRegions);

    bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                        VkDeviceSize dataSize, const void *pData) const;
    void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                      VkDeviceSize dataSize, const void *pData);

    bool PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const;
    void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                              VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker);
};