/*
 * Copyright (c) 2019-2021 Valve Corporation
 * Copyright (c) 2019-2021 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 * Author: Locke Lin <locke@lunarg.com>
 * Author: Jeremy Gebben <jeremyg@lunarg.com>
 */

#pragma once

#include <limits>
#include <map>
#include <memory>
#include <unordered_map>
#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"

class SyncValidator;
class ResourceAccessState;

enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};
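
// Editorial sketch (not used by the validator): the hazard kinds above correspond to the classic data
// races between GPU accesses. For example, recording
//
//     vkCmdCopyBuffer(cb, src, dst, 1, &region);   // write to dst
//     vkCmdCopyBuffer(cb, dst, other, 1, &region); // read of dst
//
// with no intervening vkCmdPipelineBarrier is a READ_AFTER_WRITE on dst, and two unsynchronized writes
// to the same range are a WRITE_AFTER_WRITE. The *_RACING_* values cover conflicts against accesses in
// unsynchronized (asynchronous) contexts rather than earlier commands in the same ordered stream.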

// Useful utilities for manipulating StageAccess parameters, suitable as a base class to save typing
struct SyncStageAccess {
    static inline SyncStageAccessFlags FlagBit(SyncStageAccessIndex stage_access) {
        return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
    }
    static inline SyncStageAccessFlags Flags(SyncStageAccessIndex stage_access) {
        return static_cast<SyncStageAccessFlags>(FlagBit(stage_access));
    }

    static bool IsRead(const SyncStageAccessFlags &stage_access_bit) { return (stage_access_bit & syncStageAccessReadMask).any(); }
    static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }

    static bool IsWrite(const SyncStageAccessFlags &stage_access_bit) {
        return (stage_access_bit & syncStageAccessWriteMask).any();
    }
    static bool HasWrite(const SyncStageAccessFlags &stage_access_mask) {
        return (stage_access_mask & syncStageAccessWriteMask).any();
    }
    static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
    static VkPipelineStageFlagBits PipelineStageBit(SyncStageAccessIndex stage_access_index) {
        return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
    }
    static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags stages);
    static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(VkPipelineStageFlags stages, VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(const SyncStageAccessFlags &stage_scope, VkAccessFlags accesses) {
        return stage_scope & AccessScopeByAccess(accesses);
    }
};
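
// Usage sketch (editorial): AccessScope composes a stage scope with an access mask to get the set of
// stage/access pairs a barrier actually covers, mirroring how the Vulkan spec defines the first and
// second synchronization scopes. For example:
//
//     // stage/access scope for srcStageMask = TRANSFER, srcAccessMask = TRANSFER_WRITE
//     SyncStageAccessFlags src_scope = SyncStageAccess::AccessScope(
//         SyncStageAccess::AccessScopeByStage(VK_PIPELINE_STAGE_TRANSFER_BIT), VK_ACCESS_TRANSFER_WRITE_BIT);
//
// The AccessScope(stages, access) overload performs the same composition in a single call.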

struct ResourceUsageTag {
    uint64_t index;
    CMD_TYPE command;

    static constexpr uint64_t kResetShift = 33;
    static constexpr uint64_t kCommandShift = 1;
    static constexpr uint64_t kCommandMask = 0xffffffff;

    const static uint64_t kMaxIndex = std::numeric_limits<uint64_t>::max();
    ResourceUsageTag &operator++() {
        index++;
        return *this;
    }
    bool IsBefore(const ResourceUsageTag &rhs) const { return index < rhs.index; }
    bool operator==(const ResourceUsageTag &rhs) const { return (index == rhs.index); }
    bool operator!=(const ResourceUsageTag &rhs) const { return !(*this == rhs); }

    CMD_TYPE GetCommand() const { return command; }
    uint32_t GetResetNum() const { return index >> kResetShift; }
    uint32_t GetSeqNum() const { return (index >> kCommandShift) & kCommandMask; }
    uint32_t GetSubCommand() const { return (index & 1); }

    ResourceUsageTag NextSubCommand() const {
        assert((index & 1) == 0);
        ResourceUsageTag next = *this;
        next.index++;
        return next;
    }

    ResourceUsageTag() : index(0), command(CMD_NONE) {}
    ResourceUsageTag(uint64_t index_, CMD_TYPE command_) : index(index_), command(command_) {}
    ResourceUsageTag(uint32_t reset_count, uint32_t command_num, CMD_TYPE command_)
        : index(((uint64_t)reset_count << kResetShift) | (command_num << kCommandShift)), command(command_) {}
};
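
// Bit layout sketch (editorial): the 64-bit index packs three fields using the constants above:
//
//     bit  0       sub-command flag (set via NextSubCommand, e.g. the implicit split at a subpass boundary)
//     bits 1..32   command sequence number within the current recording
//     bits 33..63  command buffer reset count
//
// so ResourceUsageTag(reset_count, command_num, cmd).index ==
//     ((uint64_t)reset_count << kResetShift) | (command_num << kCommandShift)
// and tags compare with IsBefore() in simple recording order, across resets as well as within one.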

struct HazardResult {
    std::unique_ptr<const ResourceAccessState> access_state;
    SyncStageAccessIndex usage_index = std::numeric_limits<SyncStageAccessIndex>::max();
    SyncHazard hazard = NONE;
    SyncStageAccessFlags prior_access = 0U;  // TODO -- change to a NONE enum in ...Bits
    ResourceUsageTag tag = ResourceUsageTag();
    void Set(const ResourceAccessState *access_state_, SyncStageAccessIndex usage_index_, SyncHazard hazard_,
             const SyncStageAccessFlags &prior_, const ResourceUsageTag &tag_);
};

struct SyncBarrier {
    VkPipelineStageFlags src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    VkPipelineStageFlags dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
    SyncBarrier() = default;
    SyncBarrier &operator=(const SyncBarrier &) = default;
    SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &sub_pass_barrier);
    void Merge(const SyncBarrier &other) {
        src_exec_scope |= other.src_exec_scope;
        src_access_scope |= other.src_access_scope;
        dst_exec_scope |= other.dst_exec_scope;
        dst_access_scope |= other.dst_access_scope;
    }
    SyncBarrier(VkPipelineStageFlags src_exec_scope_, const SyncStageAccessFlags &src_access_scope_,
                VkPipelineStageFlags dst_exec_scope_, const SyncStageAccessFlags &dst_access_scope_)
        : src_exec_scope(src_exec_scope_),
          src_access_scope(src_access_scope_),
          dst_exec_scope(dst_exec_scope_),
          dst_access_scope(dst_access_scope_) {}
    SyncBarrier(const SyncBarrier &other) = default;
};
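
// Illustrative sketch (editorial): a SyncBarrier is the validator-side form of a single Vulkan dependency.
// For a pipeline barrier with srcStageMask/srcAccessMask and dstStageMask/dstAccessMask it would be built
// roughly as
//
//     SyncBarrier barrier(src_exec_scope,  // expanded srcStageMask
//                         SyncStageAccess::AccessScope(src_stage_accesses, srcAccessMask),
//                         dst_exec_scope,  // expanded dstStageMask
//                         SyncStageAccess::AccessScope(dst_stage_accesses, dstAccessMask));
//
// while the (VkQueueFlags, VkSubpassDependency2) constructor performs the equivalent conversion for
// subpass dependencies. Merge() unions two barriers, allowing multiple dependencies that land on the
// same subpass transition to be treated as one combined barrier.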

enum class AccessAddressType : uint32_t { kLinear = 0, kIdealized = 1, kMaxType = 1, kTypeCount = kMaxType + 1 };

struct SyncEventState {
    enum IgnoreReason { NotIgnored = 0, ResetWaitRace, SetRace, MissingStageBits };
    using EventPointer = std::shared_ptr<EVENT_STATE>;
    using ScopeMap = sparse_container::range_map<VkDeviceSize, bool>;
    EventPointer event;
    CMD_TYPE last_command;  // Only Event commands are valid here.
    CMD_TYPE unsynchronized_set;
    VkPipelineStageFlags barriers;
    VkPipelineStageFlags stage_mask_param;
    VkPipelineStageFlags stage_mask;
    VkPipelineStageFlags exec_scope;
    SyncStageAccessFlags stage_accesses;
    ResourceUsageTag first_scope_tag;
    std::array<ScopeMap, static_cast<size_t>(AccessAddressType::kTypeCount)> first_scope;
    SyncEventState(const EventPointer &event_state)
        : event(event_state),
          last_command(CMD_NONE),
          unsynchronized_set(CMD_NONE),
          barriers(0U),
          stage_mask_param(0U),
          stage_mask(0U),
          exec_scope(0U),
          stage_accesses() {}
    SyncEventState() : SyncEventState(EventPointer()) {}
    void ResetFirstScope();
    const ScopeMap &FirstScope(AccessAddressType address_type) const { return first_scope[static_cast<size_t>(address_type)]; }
    IgnoreReason IsIgnoredByWait(VkPipelineStageFlags srcStageMask) const;
    bool HasBarrier(VkPipelineStageFlags stageMask, VkPipelineStageFlags exec_scope) const;
};

// To represent ordering guarantees such as rasterization and store
struct SyncOrderingBarrier {
    VkPipelineStageFlags exec_scope;
    SyncStageAccessFlags access_scope;
    SyncOrderingBarrier() = default;
    SyncOrderingBarrier &operator=(const SyncOrderingBarrier &) = default;
};
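
// Editorial sketch: a SyncOrderingBarrier expresses implicit ordering guarantees (e.g. rasterization
// order for attachment accesses, or render pass load/store ordering) as an execution scope plus an
// access scope. An (assumed) attachment-output ordering might look like
//
//     SyncOrderingBarrier raster_order;
//     raster_order.exec_scope = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
//     raster_order.access_scope = SyncStageAccess::AccessScopeByStage(VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT);
//
// and the DetectHazard overloads that take an ordering treat accesses inside that scope as ordered even
// though no explicit barrier was recorded.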

class ResourceAccessState : public SyncStageAccess {
  protected:
    // Multiple read operations can be simultaneously (and independently) synchronized. Given that only
    // the second execution scope creates a dependency chain, we have to track each, but only up to one
    // per pipeline stage (as another read from the *same* stage becomes the more recent, and applicable,
    // one for hazard detection).
    struct ReadState {
        VkPipelineStageFlagBits stage;  // The stage of this read
        SyncStageAccessFlags access;    // TODO: Change to FlagBits when we have a None bit enum
        // TODO: Revisit whether this needs to support multiple reads per stage
        VkPipelineStageFlags barriers;  // all applicable barriered stages
        ResourceUsageTag tag;
        VkPipelineStageFlags pending_dep_chain;  // Should be zero except during barrier application
        // Excluded from comparison
        ReadState() = default;
        ReadState(VkPipelineStageFlagBits stage_, SyncStageAccessFlags access_, VkPipelineStageFlags barriers_,
                  const ResourceUsageTag &tag_)
            : stage(stage_), access(access_), barriers(barriers_), tag(tag_), pending_dep_chain(0) {}
        bool operator==(const ReadState &rhs) const {
            bool same = (stage == rhs.stage) && (access == rhs.access) && (barriers == rhs.barriers) && (tag == rhs.tag);
            return same;
        }
        bool IsReadBarrierHazard(VkPipelineStageFlags src_exec_scope) const {
            // If the read stage is not in the src sync scope
            // *AND* not execution chained with an existing sync barrier (that's the or)
            // then the barrier access is unsafe (R/W after R)
            return (src_exec_scope & (stage | barriers)) == 0;
        }

        bool operator!=(const ReadState &rhs) const { return !(*this == rhs); }
        inline void Set(VkPipelineStageFlagBits stage_, SyncStageAccessFlags access_, VkPipelineStageFlags barriers_,
                        const ResourceUsageTag &tag_) {
            stage = stage_;
            access = access_;
            barriers = barriers_;
            tag = tag_;
            pending_dep_chain = 0;  // If this is a new read, we aren't applying a barrier set.
        }
    };
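
    // Editorial example for IsReadBarrierHazard above: a read at VK_PIPELINE_STAGE_VERTEX_SHADER_BIT whose
    // barriers include VK_PIPELINE_STAGE_TRANSFER_BIT is safe against a barrier whose src_exec_scope covers
    // either VERTEX_SHADER (directly in scope) or TRANSFER (execution chained through the earlier barrier);
    // a src_exec_scope of only FRAGMENT_SHADER intersects neither, so a subsequent write is a WAR hazard.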

  public:
    HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, const SyncOrderingBarrier &ordering) const;

    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags source_exec_scope,
                                     const SyncStageAccessFlags &source_access_scope) const;
    HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index, const ResourceUsageTag &start_tag) const;
    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags source_exec_scope,
                                     const SyncStageAccessFlags &source_access_scope, const ResourceUsageTag &event_tag) const;

    void Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag);
    void SetWrite(const SyncStageAccessFlags &usage_bit, const ResourceUsageTag &tag);
    void Resolve(const ResourceAccessState &other);
    void ApplyBarriers(const std::vector<SyncBarrier> &barriers, bool layout_transition);
    void ApplyBarriers(const std::vector<SyncBarrier> &barriers, const ResourceUsageTag &tag);
    void ApplyBarrier(const SyncBarrier &barrier, bool layout_transition);
    void ApplyBarrier(const ResourceUsageTag &scope_tag, const SyncBarrier &barrier, bool layout_transition);
    void ApplyPendingBarriers(const ResourceUsageTag &tag);

    ResourceAccessState()
        : write_barriers(~SyncStageAccessFlags(0)),
          write_dependency_chain(0),
          write_tag(),
          last_write(0),
          input_attachment_read(false),
          last_read_stages(0),
          read_execution_barriers(0),
          pending_write_dep_chain(0),
          pending_layout_transition(false),
          pending_write_barriers(0) {}

    bool HasPendingState() const {
        return (0 != pending_layout_transition) || pending_write_barriers.any() || (0 != pending_write_dep_chain);
    }
    bool HasWriteOp() const { return last_write != 0; }
    bool operator==(const ResourceAccessState &rhs) const {
        bool same = (write_barriers == rhs.write_barriers) && (write_dependency_chain == rhs.write_dependency_chain) &&
                    (last_reads == rhs.last_reads) && (last_read_stages == rhs.last_read_stages) && (write_tag == rhs.write_tag) &&
                    (input_attachment_read == rhs.input_attachment_read) &&
                    (read_execution_barriers == rhs.read_execution_barriers);
        return same;
    }
    bool operator!=(const ResourceAccessState &rhs) const { return !(*this == rhs); }
    VkPipelineStageFlags GetReadBarriers(const SyncStageAccessFlags &usage) const;
    SyncStageAccessFlags GetWriteBarriers() const { return write_barriers; }
    bool InSourceScopeOrChain(VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope) const {
        return ReadInSourceScopeOrChain(src_exec_scope) || WriteInSourceScopeOrChain(src_exec_scope, src_access_scope);
    }

  private:
    static constexpr VkPipelineStageFlags kInvalidAttachmentStage = ~VkPipelineStageFlags(0);
    bool IsWriteHazard(SyncStageAccessFlags usage) const { return (usage & ~write_barriers).any(); }
    bool IsRAWHazard(VkPipelineStageFlagBits usage_stage, const SyncStageAccessFlags &usage) const;
    bool IsWriteBarrierHazard(VkPipelineStageFlags src_exec_scope, const SyncStageAccessFlags &src_access_scope) const {
        // If the previous write is *not* in the 1st access scope
        // *AND* the current barrier is not in the dependency chain
        // *AND* there is no prior memory barrier for the previous write in the dependency chain
        // then the barrier access is unsafe (R/W after W)
        return ((last_write & src_access_scope) == 0) && ((src_exec_scope & write_dependency_chain) == 0) && (write_barriers == 0);
    }
    bool ReadInSourceScopeOrChain(VkPipelineStageFlags src_exec_scope) const {
        return (0 != (src_exec_scope & (last_read_stages | read_execution_barriers)));
    }
    bool WriteInSourceScopeOrChain(VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope) const {
        return (src_access_scope & last_write).any() || (write_dependency_chain & src_exec_scope);
    }

    static bool IsReadHazard(VkPipelineStageFlagBits stage, const VkPipelineStageFlags barriers) {
        return 0 != (stage & ~barriers);
    }
    static bool IsReadHazard(VkPipelineStageFlags stage_mask, const VkPipelineStageFlags barriers) {
        return stage_mask != (stage_mask & barriers);
    }

    bool IsReadHazard(VkPipelineStageFlagBits stage, const ReadState &read_access) const {
        return IsReadHazard(stage, read_access.barriers);
    }
    bool IsReadHazard(VkPipelineStageFlags stage_mask, const ReadState &read_access) const {
        return IsReadHazard(stage_mask, read_access.barriers);
    }
    VkPipelineStageFlags GetOrderedStages(const SyncOrderingBarrier &ordering) const;

    // TODO: Add a NONE (zero) enum to SyncStageAccessFlags for input_attachment_read and last_write

    // With reads, each must be "safe" relative to its prior write, so we need only save the most recent
    // write operation (as anything *transitively* unsafe would already be included).
    SyncStageAccessFlags write_barriers;          // union of applicable barrier masks since last write
    VkPipelineStageFlags write_dependency_chain;  // initially zero, but accumulating the dstStages of barriers if they chain.
    ResourceUsageTag write_tag;
    SyncStageAccessFlags last_write;  // only the most recent write

    // TODO Input Attachment cleanup for multiple reads in a given stage
    // Tracks whether the fragment shader read is input attachment read
    bool input_attachment_read;

    VkPipelineStageFlags last_read_stages;
    VkPipelineStageFlags read_execution_barriers;
    small_vector<ReadState, 3> last_reads;

    // Pending execution state to support independent parallel barriers
    VkPipelineStageFlags pending_write_dep_chain;
    bool pending_layout_transition;
    SyncStageAccessFlags pending_write_barriers;
};
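
// Editorial sketch of how the members above combine during hazard detection (see the matching .cpp for
// the authoritative logic): a read against a prior write is READ_AFTER_WRITE unless the write has been
// made visible to that usage (usage & write_barriers), a write against prior reads is WRITE_AFTER_READ
// unless every read stage is execution-ordered before it (ReadState::barriers), and a write against an
// un-barriered prior write is WRITE_AFTER_WRITE. Roughly (enum spellings assumed from the generated
// synchronization_validation_types.h):
//
//     ResourceAccessState state;
//     state.Update(SYNC_TRANSFER_TRANSFER_WRITE, copy_tag);                    // a copy writes the range
//     HazardResult hazard = state.DetectHazard(SYNC_VERTEX_SHADER_SHADER_READ);
//     // hazard.hazard == READ_AFTER_WRITE until a barrier adds the read's scope to write_barriers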

using ResourceAccessRangeMap = sparse_container::range_map<VkDeviceSize, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;
using ResourceRangeMergeIterator = sparse_container::parallel_iterator<ResourceAccessRangeMap, const ResourceAccessRangeMap>;

class AccessContext {
  public:
    enum DetectOptions : uint32_t {
        kDetectPrevious = 1U << 0,
        kDetectAsync = 1U << 1,
        kDetectAll = (kDetectPrevious | kDetectAsync)
    };
    using MapArray = std::array<ResourceAccessRangeMap, static_cast<size_t>(AccessAddressType::kTypeCount)>;

    // WIP TODO WIP Multi-dep -- change track back to support barrier vector, not just last.
    struct TrackBack {
        std::vector<SyncBarrier> barriers;
        const AccessContext *context;
        TrackBack(const AccessContext *context_, VkQueueFlags queue_flags_,
                  const std::vector<const VkSubpassDependency2 *> &subpass_dependencies_)
            : barriers(), context(context_) {
            barriers.reserve(subpass_dependencies_.size());
            for (const VkSubpassDependency2 *dependency : subpass_dependencies_) {
                assert(dependency);
                barriers.emplace_back(queue_flags_, *dependency);
            }
        }

        TrackBack &operator=(const TrackBack &) = default;
        TrackBack() = default;
    };
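
    // Editorial note: a TrackBack is an edge of the subpass dependency graph -- the previous AccessContext
    // to "lazily" import state from, plus the SyncBarriers (one per VkSubpassDependency2) that apply when
    // crossing that edge. For example, a dependency with srcSubpass = 0 and dstSubpass = 1 becomes, in
    // subpass 1's context, a TrackBack whose context points at subpass 0's AccessContext and whose
    // barriers vector holds the SyncBarrier built from that dependency.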

    HazardResult DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;
    template <typename Detector>
    HazardResult DetectHazard(Detector &detector, const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
                              const VkOffset3D &offset, const VkExtent3D &extent, DetectOptions options) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, const SyncOrderingBarrier &ordering,
                              const VkOffset3D &offset, const VkExtent3D &extent) const;
    HazardResult DetectHazard(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
                              const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask = 0U) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          const SyncStageAccessFlags &src_access_scope,
                                          const VkImageSubresourceRange &subresource_range, const SyncEventState &sync_event,
                                          DetectOptions options) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          const SyncStageAccessFlags &src_access_scope,
                                          const VkImageSubresourceRange &subresource_range, DetectOptions options) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          const SyncStageAccessFlags &src_stage_accesses,
                                          const VkImageMemoryBarrier &barrier) const;
    HazardResult DetectSubpassTransitionHazard(const TrackBack &track_back, const IMAGE_VIEW_STATE *attach_view) const;

    void RecordLayoutTransitions(const RENDER_PASS_STATE &rp_state, uint32_t subpass,
                                 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const ResourceUsageTag &tag);

    const TrackBack &GetDstExternalTrackBack() const { return dst_external_; }
    void Reset() {
        prev_.clear();
        prev_by_subpass_.clear();
        async_.clear();
        src_external_ = TrackBack();
        dst_external_ = TrackBack();
        start_tag_ = ResourceUsageTag();
        for (auto &map : access_state_maps_) {
            map.clear();
        }
    }

    // Follow the previous context to access the access state, supporting "lazy" import into this context.
    // Not intended for subpass layout transitions, as the pending state handling is more complex.
    // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead
    // Would need to add a "hint" overload to the parallel_iterator::invalidate_[AB] call, if so.
    void ResolvePreviousAccess(AccessAddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state) const;
    void ResolvePreviousAccesses();
    template <typename BarrierAction>
    void ResolveAccessRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range,
                            BarrierAction &barrier_action, AccessAddressType address_type, ResourceAccessRangeMap *descent_map,
                            const ResourceAccessState *infill_state) const;
    template <typename BarrierAction>
    void ResolveAccessRange(AccessAddressType type, const ResourceAccessRange &range, BarrierAction &barrier_action,
                            ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
                            bool recur_to_infill = true) const;

    void UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const VkOffset3D &offset,
                           const VkExtent3D &extent, VkImageAspectFlags aspect_mask, const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    void UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                       const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                       const ResourceUsageTag &tag);
    void UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                     const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                     const ResourceUsageTag &tag);

    void ResolveChildContexts(const std::vector<AccessContext> &contexts);

    template <typename Action>
    void UpdateResourceAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action);
    template <typename Action>
    void UpdateResourceAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range, const Action action);

    template <typename Action>
    void ApplyGlobalBarriers(const Action &barrier_action);
    static AccessAddressType ImageAddressType(const IMAGE_STATE &image);

    AccessContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
                  const std::vector<AccessContext> &contexts, const AccessContext *external_context);

    AccessContext() { Reset(); }
    AccessContext(const AccessContext &copy_from) = default;

    ResourceAccessRangeMap &GetAccessStateMap(AccessAddressType type) { return access_state_maps_[static_cast<size_t>(type)]; }
    const ResourceAccessRangeMap &GetAccessStateMap(AccessAddressType type) const {
        return access_state_maps_[static_cast<size_t>(type)];
    }
    ResourceAccessRangeMap &GetLinearMap() { return GetAccessStateMap(AccessAddressType::kLinear); }
    const ResourceAccessRangeMap &GetLinearMap() const { return GetAccessStateMap(AccessAddressType::kLinear); }
    ResourceAccessRangeMap &GetIdealizedMap() { return GetAccessStateMap(AccessAddressType::kIdealized); }
    const ResourceAccessRangeMap &GetIdealizedMap() const { return GetAccessStateMap(AccessAddressType::kIdealized); }
    const TrackBack *GetTrackBackFromSubpass(uint32_t subpass) const {
        if (subpass == VK_SUBPASS_EXTERNAL) {
            return &src_external_;
        } else {
            assert(subpass < prev_by_subpass_.size());
            return prev_by_subpass_[subpass];
        }
    }

    bool ValidateLayoutTransitions(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                   const VkRect2D &render_area, uint32_t subpass,
                                   const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name) const;
    bool ValidateLoadOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                               uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                               const char *func_name) const;
    bool ValidateStoreOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                const char *func_name) const;
    bool ValidateResolveOperations(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                   const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
                                   uint32_t subpass) const;

    void SetStartTag(const ResourceUsageTag &tag) { start_tag_ = tag; }
    template <typename Action>
    void ForAll(Action &&action);

  private:
    template <typename Detector>
    HazardResult DetectHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range,
                              DetectOptions options) const;
    template <typename Detector>
    HazardResult DetectAsyncHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectPreviousHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    void UpdateAccessState(AccessAddressType type, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);

    MapArray access_state_maps_;
    std::vector<TrackBack> prev_;
    std::vector<TrackBack *> prev_by_subpass_;
    std::vector<const AccessContext *> async_;
    TrackBack src_external_;
    TrackBack dst_external_;
    ResourceUsageTag start_tag_;
};
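
// Editorial usage sketch (driven from CommandBufferAccessContext and the SyncValidator
// PreCallValidate*/PreCallRecord* hooks below; the local names here are illustrative):
//
//     // Validate phase: look for a conflict with anything already recorded
//     AccessContext *context = cb_access_context->GetCurrentAccessContext();
//     HazardResult hazard = context->DetectHazard(*dst_buffer_state, SYNC_TRANSFER_TRANSFER_WRITE, dst_range);
//     if (hazard.hazard) { /* report a syncval error against func_name */ }
//
//     // Record phase: fold the access into the map so later commands are checked against it
//     context->UpdateAccessState(*dst_buffer_state, SYNC_TRANSFER_TRANSFER_WRITE, dst_range,
//                                cb_access_context->NextCommandTag(CMD_COPYBUFFER));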

class RenderPassAccessContext {
  public:
    RenderPassAccessContext() : rp_state_(nullptr), current_subpass_(0) {}

    bool ValidateDrawSubpassAttachment(const SyncValidator &sync_state, const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area,
                                       const char *func_name) const;
    void RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area, const ResourceUsageTag &tag);
    bool ValidateNextSubpass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *command_name) const;
    bool ValidateEndRenderPass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *func_name) const;
    bool ValidateFinalSubpassLayoutTransitions(const SyncValidator &sync_state, const VkRect2D &render_area,
                                               const char *func_name) const;

    void RecordLayoutTransitions(const ResourceUsageTag &tag);
    void RecordLoadOperations(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordBeginRenderPass(const SyncValidator &state, const CMD_BUFFER_STATE &cb_state, const AccessContext *external_context,
                               VkQueueFlags queue_flags, const ResourceUsageTag &tag);
    void RecordNextSubpass(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordEndRenderPass(AccessContext *external_context, const VkRect2D &render_area, const ResourceUsageTag &tag);

    AccessContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
    const AccessContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
    const std::vector<AccessContext> &GetContexts() const { return subpass_contexts_; }
    uint32_t GetCurrentSubpass() const { return current_subpass_; }
    const RENDER_PASS_STATE *GetRenderPassState() const { return rp_state_; }
    AccessContext *CreateStoreResolveProxy(const VkRect2D &render_area) const;

  private:
    const RENDER_PASS_STATE *rp_state_;
    uint32_t current_subpass_;
    std::vector<AccessContext> subpass_contexts_;
    std::vector<const IMAGE_VIEW_STATE *> attachment_views_;
};

class CommandBufferAccessContext {
  public:
    CommandBufferAccessContext()
        : command_number_(0),
          reset_count_(0),
          render_pass_contexts_(),
          cb_access_context_(),
          current_context_(&cb_access_context_),
          current_renderpass_context_(),
          cb_state_(),
          queue_flags_() {}
    CommandBufferAccessContext(SyncValidator &sync_validator, std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
        : CommandBufferAccessContext() {
        cb_state_ = cb_state;
        sync_state_ = &sync_validator;
        queue_flags_ = queue_flags;
    }

    void Reset() {
        command_number_ = 0;
        reset_count_++;
        cb_access_context_.Reset();
        render_pass_contexts_.clear();
        current_context_ = &cb_access_context_;
        current_renderpass_context_ = nullptr;
        event_state_.clear();
    }

    AccessContext *GetCurrentAccessContext() { return current_context_; }
    const AccessContext *GetCurrentAccessContext() const { return current_context_; }
    void RecordBeginRenderPass(const ResourceUsageTag &tag);
    void ApplyBufferBarriers(const SyncEventState &sync_event, VkPipelineStageFlags dst_exec_scope,
                             const SyncStageAccessFlags &dst_stage_accesses, uint32_t barrier_count,
                             const VkBufferMemoryBarrier *barriers);
    void ApplyGlobalBarriers(SyncEventState &sync_event, VkPipelineStageFlags dstStageMask, VkPipelineStageFlags dst_exec_scope,
                             const SyncStageAccessFlags &dst_stage_accesses, uint32_t memory_barrier_count,
                             const VkMemoryBarrier *pMemoryBarriers, const ResourceUsageTag &tag);
    void ApplyGlobalBarriersToEvents(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags src_exec_scope,
                                     VkPipelineStageFlags dstStageMask, VkPipelineStageFlags dst_exec_scope);
    void ApplyImageBarriers(const SyncEventState &sync_event, VkPipelineStageFlags dst_exec_scope,
                            const SyncStageAccessFlags &dst_stage_accesses, uint32_t barrier_count,
                            const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag);
    bool ValidateBeginRenderPass(const RENDER_PASS_STATE &render_pass, const VkRenderPassBeginInfo *pRenderPassBegin,
                                 const VkSubpassBeginInfo *pSubpassBeginInfo, const char *func_name) const;
    bool ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const char *func_name) const;
    void RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const ResourceUsageTag &tag);
    bool ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const char *func_name) const;
    void RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const ResourceUsageTag &tag);
    bool ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const char *func_name) const;
    void RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const ResourceUsageTag &tag);
    bool ValidateDrawSubpassAttachment(const char *func_name) const;
    void RecordDrawSubpassAttachment(const ResourceUsageTag &tag);
    bool ValidateNextSubpass(const char *func_name) const;
    bool ValidateEndRenderpass(const char *func_name) const;
    void RecordNextSubpass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);
    void RecordEndRenderPass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);

    bool ValidateSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const;
    void RecordSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask, const ResourceUsageTag &tag);
    bool ValidateResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const;
    void RecordResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
    bool ValidateWaitEvents(uint32_t eventCount, const VkEvent *pEvents, VkPipelineStageFlags srcStageMask,
                            VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                            uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                            uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) const;
    void RecordWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                          VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount,
                          const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                          const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                          const VkImageMemoryBarrier *pImageMemoryBarriers, const ResourceUsageTag &tag);
    void RecordDestroyEvent(VkEvent event);

    CMD_BUFFER_STATE *GetCommandBufferState() { return cb_state_.get(); }
    const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
    VkQueueFlags GetQueueFlags() const { return queue_flags_; }
    inline ResourceUsageTag NextCommandTag(CMD_TYPE command) {
        // TODO: add command encoding to ResourceUsageTag.
        // What else do we want to include? Do we want some sort of "parent" or global sequence number?
        command_number_++;
        // The lowest bit is a sub-command number used to separate operations at the end of the previous
        // renderpass from the start of the new one in vkCmdNextSubpass().
        ResourceUsageTag next(reset_count_, command_number_, command);
        return next;
    }

  private:
    SyncEventState *GetEventState(VkEvent);
    const SyncEventState *GetEventState(VkEvent) const;
    uint32_t command_number_;
    uint32_t reset_count_;
    std::vector<RenderPassAccessContext> render_pass_contexts_;
    AccessContext cb_access_context_;
    AccessContext *current_context_;
    RenderPassAccessContext *current_renderpass_context_;
    std::shared_ptr<CMD_BUFFER_STATE> cb_state_;
    SyncValidator *sync_state_;

    VkQueueFlags queue_flags_;
    std::unordered_map<VkEvent, std::unique_ptr<SyncEventState>> event_state_;
};
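
// Editorial note: reset_count_ feeds the high bits of every ResourceUsageTag (see NextCommandTag above),
// so a tag minted after re-recording a reused command buffer is strictly greater than anything from the
// prior recording, e.g.
//
//     ResourceUsageTag(/*reset_count*/ 2, /*command_num*/ 1, CMD_DRAW).index >
//         ResourceUsageTag(/*reset_count*/ 1, /*command_num*/ 1000, CMD_DRAW).index
//
// while event_state_ keeps per-VkEvent SyncEventState so vkCmdWaitEvents can be validated against the
// first scope captured at the matching vkCmdSetEvent.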

class SyncValidator : public ValidationStateTracker, public SyncStageAccess {
  public:
    SyncValidator() { container_type = LayerObjectTypeSyncValidation; }
    using StateTracker = ValidationStateTracker;

    using StateTracker::AccessorTraitsTypes;
    std::unordered_map<VkCommandBuffer, std::unique_ptr<CommandBufferAccessContext>> cb_access_state;
    CommandBufferAccessContext *GetAccessContextImpl(VkCommandBuffer command_buffer, bool do_insert) {
        auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            if (!do_insert) return nullptr;
            // If we don't have one, make it.
            auto cb_state = GetShared<CMD_BUFFER_STATE>(command_buffer);
            assert(cb_state.get());
            auto queue_flags = GetQueueFlags(*cb_state);
            std::unique_ptr<CommandBufferAccessContext> context(new CommandBufferAccessContext(*this, cb_state, queue_flags));
            auto insert_pair = cb_access_state.insert(std::make_pair(command_buffer, std::move(context)));
            found_it = insert_pair.first;
        }
        return found_it->second.get();
    }
    CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, true);  // true -> do_insert on not found
    }
    CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, false);  // false -> don't do_insert on not found
    }

    const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const {
        const auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            return nullptr;
        }
        return found_it->second.get();
    }

    void ApplyGlobalBarriers(AccessContext *context, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                             SyncStageAccessFlags src_stage_scope, SyncStageAccessFlags dst_stage_scope,
                             uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, const ResourceUsageTag &tag);
    void ApplyBufferBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask,
                             const SyncStageAccessFlags &src_stage_scope, VkPipelineStageFlags dst_stage_mask,
                             const SyncStageAccessFlags &dst_stage_scope, uint32_t barrier_count,
                             const VkBufferMemoryBarrier *barriers);
    void ApplyImageBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask,
                            const SyncStageAccessFlags &src_stage_scope, VkPipelineStageFlags dst_stage_mask,
                            const SyncStageAccessFlags &dst_stage_scope, uint32_t barrier_count,
                            const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag);

    void ResetCommandBufferCallback(VkCommandBuffer command_buffer);
    void FreeCommandBufferCallback(VkCommandBuffer command_buffer);
    void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                  const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE command);
    void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                              const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command);
703 void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command);
John Zulauf33fc1d52020-07-17 11:01:10 -0600704 bool SupressedBoundDescriptorWAW(const HazardResult &hazard) const;
John Zulauf9cb530d2019-09-30 14:14:10 -0600705
706 void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600707 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) override;
John Zulauf9cb530d2019-09-30 14:14:10 -0600708
John Zulauf355e49b2020-04-24 15:11:15 -0600709 bool ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -0800710 const VkSubpassBeginInfo *pSubpassBeginInfo, const char *func_name) const;
John Zulauf355e49b2020-04-24 15:11:15 -0600711
712 bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600713 VkSubpassContents contents) const override;
John Zulauf355e49b2020-04-24 15:11:15 -0600714
715 bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -0800716 const VkSubpassBeginInfo *pSubpassBeginInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -0600717
718 bool PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -0800719 const VkSubpassBeginInfo *pSubpassBeginInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -0600720
John Zulauf9cb530d2019-09-30 14:14:10 -0600721 bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600722 const VkBufferCopy *pRegions) const override;
John Zulauf9cb530d2019-09-30 14:14:10 -0600723
724 void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600725 const VkBufferCopy *pRegions) override;
John Zulauf9cb530d2019-09-30 14:14:10 -0600726
John Zulauf4a6105a2020-11-17 15:11:05 -0700727 void PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) override;
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600728 bool PreCallValidateCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -0400729
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600730 void PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) override;
Jeff Leger178b1e52020-10-05 12:22:23 -0400731
John Zulauf5c5e88d2019-12-26 11:22:02 -0700732 bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
733 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600734 const VkImageCopy *pRegions) const override;
John Zulauf5c5e88d2019-12-26 11:22:02 -0700735
736 void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600737 VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions) override;
John Zulauf5c5e88d2019-12-26 11:22:02 -0700738
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600739 bool PreCallValidateCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -0400740
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600741 void PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) override;
Jeff Leger178b1e52020-10-05 12:22:23 -0400742
John Zulauf9cb530d2019-09-30 14:14:10 -0600743 bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
744 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
745 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
746 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
747 uint32_t imageMemoryBarrierCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600748 const VkImageMemoryBarrier *pImageMemoryBarriers) const override;
John Zulauf9cb530d2019-09-30 14:14:10 -0600749
750 void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
751 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
752 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
753 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600754 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600755
    void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
                                          VkResult result) override;

    void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                          VkSubpassContents contents) override;
    void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                           const VkSubpassBeginInfo *pSubpassBeginInfo) override;
    void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                              const VkSubpassBeginInfo *pSubpassBeginInfo) override;

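    // vkCmdNextSubpass family (core, Vulkan 1.2, and KHR): a shared validation helper backs the per-entry-point overrides.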
    bool ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                const VkSubpassEndInfo *pSubpassEndInfo, const char *func_name) const;
    bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const override;
    bool PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                        const VkSubpassEndInfo *pSubpassEndInfo) const override;
    bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                           const VkSubpassEndInfo *pSubpassEndInfo) const override;

    void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) override;
    void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                       const VkSubpassEndInfo *pSubpassEndInfo) override;
    void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                          const VkSubpassEndInfo *pSubpassEndInfo) override;

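    // vkCmdEndRenderPass family: shared validation helper plus per-entry-point validate/record overrides.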
    bool ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo,
                                  const char *func_name) const;
    bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const override;
    bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const override;
    bool PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const override;

    void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) override;
    void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) override;
    void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) override;

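    // Buffer/image copy hooks; the templated helpers are shared by the core commands and their VK_KHR_copy_commands2 equivalents.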
    template <typename BufferImageCopyRegionType>
    bool ValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                      VkImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                      CopyCommandVersion version) const;
    bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                             VkImageLayout dstImageLayout, uint32_t regionCount,
                                             const VkBufferImageCopy *pRegions) const override;
    bool PreCallValidateCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
                                                 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) const override;

    template <typename BufferImageCopyRegionType>
    void RecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                    VkImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                    CopyCommandVersion version);
    void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                           VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions) override;
    void PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
                                               const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) override;

    template <typename BufferImageCopyRegionType>
    bool ValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                      VkBuffer dstBuffer, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                      CopyCommandVersion version) const;
    bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                             VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) const override;
    bool PreCallValidateCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
                                                 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) const override;

    template <typename BufferImageCopyRegionType>
    void RecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                    VkBuffer dstBuffer, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                    CopyCommandVersion version);
    void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                           VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) override;
    void PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
                                               const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) override;

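    // Image blit hooks: templated validation/recording shared by vkCmdBlitImage and vkCmdBlitImage2KHR.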
    template <typename RegionType>
    bool ValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                              VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter,
                              const char *apiName) const;

    bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageBlit *pRegions, VkFilter filter) const override;
    bool PreCallValidateCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) const override;

    template <typename RegionType>
    void RecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                            VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter,
                            ResourceUsageTag tag);
    void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions,
                                   VkFilter filter) override;
    void PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) override;

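    // Helpers that validate and record reads of the indirect parameter buffer and count buffer used by indirect draw/dispatch.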
    bool ValidateIndirectBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, const VkDeviceSize struct_size,
                                const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, const uint32_t stride,
                                const char *function) const;
    void RecordIndirectBuffer(AccessContext &context, const ResourceUsageTag &tag, const VkDeviceSize struct_size,
                              const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, uint32_t stride);

    bool ValidateCountBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                             const char *function) const;
    void RecordCountBuffer(AccessContext &context, const ResourceUsageTag &tag, VkBuffer buffer, VkDeviceSize offset);

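    // Dispatch and draw hooks (direct, indexed, and indirect forms).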
    bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const override;
    void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) override;

    bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const override;
    void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) override;

    bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
                                uint32_t firstInstance) const override;
    void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
                              uint32_t firstInstance) override;

    bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
                                       uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const override;
    void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
                                     uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) override;

    bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
                                        uint32_t stride) const override;
    void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
                                      uint32_t stride) override;

    bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                               uint32_t drawCount, uint32_t stride) const override;
    void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             uint32_t drawCount, uint32_t stride) override;

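    // Draw*IndirectCount hooks: the core, KHR, and AMD entry points share common validation helpers.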
    bool ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer,
                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride,
                                      const char *function) const;
    bool PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                             uint32_t stride) const override;
    void PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                           VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                           uint32_t stride) override;
    bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                uint32_t stride) const override;
    void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                              VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                              uint32_t stride) override;
    bool PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                uint32_t stride) const override;
    void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                              VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                              uint32_t stride) override;

    bool ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                             uint32_t stride, const char *function) const;
    bool PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                    VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                    uint32_t stride) const override;
    void PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                  VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                  uint32_t stride) override;
    bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                       VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                       uint32_t stride) const override;
    void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                     VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                     uint32_t stride) override;
    bool PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                       VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                       uint32_t stride) const override;
    void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                     VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                     uint32_t stride) override;

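    // Clear color and clear depth/stencil image hooks.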
    bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                           const VkClearColorValue *pColor, uint32_t rangeCount,
                                           const VkImageSubresourceRange *pRanges) const override;
    void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                         const VkClearColorValue *pColor, uint32_t rangeCount,
                                         const VkImageSubresourceRange *pRanges) override;

    bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                                  const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
                                                  const VkImageSubresourceRange *pRanges) const override;
    void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                                const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
                                                const VkImageSubresourceRange *pRanges) override;

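    // Remaining transfer-style commands: query result copies, buffer fill/update, image resolve (including the KHR resolve2 variant), and AMD buffer markers.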
    bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
                                                uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                VkDeviceSize stride, VkQueryResultFlags flags) const override;
    void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
                                              uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
                                              VkQueryResultFlags flags) override;

    bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
                                      uint32_t data) const override;
    void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
                                    uint32_t data) override;

    bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                        VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                        const VkImageResolve *pRegions) const override;

    void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                      VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                      const VkImageResolve *pRegions) override;

    bool PreCallValidateCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
                                            const VkResolveImageInfo2KHR *pResolveImageInfo) const override;
    void PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR *pResolveImageInfo) override;

    bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                        VkDeviceSize dataSize, const void *pData) const override;
    void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                      VkDeviceSize dataSize, const void *pData) override;

    bool PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const override;
    void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                              VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) override;

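    // vkCmdSetEvent, vkCmdResetEvent, and vkCmdWaitEvents hooks.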
    bool PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const override;
    void PostCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) override;

    bool PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const override;
    void PostCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) override;

    bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                      VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                      uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                      uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                      uint32_t imageMemoryBarrierCount,
                                      const VkImageMemoryBarrier *pImageMemoryBarriers) const override;
    void PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                     VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                     uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                     uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                     uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) override;
};
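
// Illustrative sketch only (not declared in this header): the validation chassis is assumed to pair the
// hooks above roughly as follows, skipping the record step and the downstream call when validation asks
// to skip. The sync_validator pointer and DispatchCmdDispatch helper are placeholder names.
//
//     bool skip = sync_validator->PreCallValidateCmdDispatch(commandBuffer, x, y, z);
//     if (!skip) {
//         sync_validator->PreCallRecordCmdDispatch(commandBuffer, x, y, z);
//         DispatchCmdDispatch(commandBuffer, x, y, z);  // forward the call down the layer chain
//     }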