/*
 * Copyright (c) 2019-2021 Valve Corporation
 * Copyright (c) 2019-2021 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 * Author: Locke Lin <locke@lunarg.com>
 * Author: Jeremy Gebben <jeremyg@lunarg.com>
 */

#pragma once

#include <limits>
#include <map>
#include <memory>
#include <unordered_map>
#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"

class SyncValidator;
class ResourceAccessState;

enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};
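
// Illustrative example (not part of the interface): with no barrier between them, the command pair
//   vkCmdCopyBuffer(cb, src, dst, ...);   // writes dst
//   vkCmdCopyBuffer(cb, dst, other, ...); // reads dst
// is reported as READ_AFTER_WRITE on dst. The *_RACING_* values cover accesses from contexts with no
// ordering at all (e.g. another subpass with no dependency path to the one that produced the data).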

// Useful Utilities for manipulating StageAccess parameters, suitable as base class to save typing
struct SyncStageAccess {
    static inline SyncStageAccessFlags FlagBit(SyncStageAccessIndex stage_access) {
        return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
    }
    static inline SyncStageAccessFlags Flags(SyncStageAccessIndex stage_access) {
        return static_cast<SyncStageAccessFlags>(FlagBit(stage_access));
    }

    static bool IsRead(const SyncStageAccessFlags &stage_access_bit) { return (stage_access_bit & syncStageAccessReadMask).any(); }
    static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }

    static bool IsWrite(const SyncStageAccessFlags &stage_access_bit) {
        return (stage_access_bit & syncStageAccessWriteMask).any();
    }
    static bool HasWrite(const SyncStageAccessFlags &stage_access_mask) {
        return (stage_access_mask & syncStageAccessWriteMask).any();
    }
    static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
    static VkPipelineStageFlagBits PipelineStageBit(SyncStageAccessIndex stage_access_index) {
        return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
    }
    static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags stages);
    static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(VkPipelineStageFlags stages, VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(const SyncStageAccessFlags &stage_scope, VkAccessFlags accesses) {
        return stage_scope & AccessScopeByAccess(accesses);
    }
};
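
// Minimal usage sketch (illustrative only): intersect a stage mask with an access mask to get the
// stage/access bits a barrier scope actually covers. The concrete flag values come from the caller.
//   SyncStageAccessFlags src_scope =
//       SyncStageAccess::AccessScope(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT);
//   bool covers_write = SyncStageAccess::HasWrite(src_scope);  // transfer-write is a write access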

struct ResourceUsageTag {
    using TagIndex = uint64_t;
    constexpr static TagIndex kMaxIndex = std::numeric_limits<TagIndex>::max();
    // Special values for submit_index
    // Every unsubmitted primary is *after* every submitted primary
    // Every recorded secondary is *after* every executing primary
    constexpr static TagIndex kUnsubmittedPrimary = kMaxIndex - 1;
    constexpr static TagIndex kUnexecutedSecondary = kMaxIndex;
    // TODO: determine submit_index encoding
    TagIndex submit_index;
    TagIndex index;            // the index of the command within the command buffer itself (primary or secondary)
    TagIndex secondary_index;  // the index of the command within an executed secondary imported into a primary context
    CMD_TYPE command;            // the command within the command buffer itself (primary or secondary)
    CMD_TYPE secondary_command;  // the command within the secondary imported into a primary (iff command == CMD_EXECUTECOMMANDS)

    // This is the command information encoding for index (primary) and secondary
    static constexpr uint64_t kResetShift = 33;
    static constexpr uint64_t kCommandShift = 1;
    static constexpr uint64_t kCommandMask = 0xffffffff;
    static constexpr uint64_t kSubCommandMask = (1 << kCommandShift) - 1U;
    struct SequenceId {
        uint32_t reset_count;
        uint32_t seq_num;
        uint32_t sub_command;
    };
    static TagIndex EncodeSequenceId(TagIndex reset_count, uint32_t command_num, uint32_t sub_command) {
        const TagIndex command_index = ((static_cast<TagIndex>(command_num) << kCommandShift) | (sub_command & kSubCommandMask));
        return (reset_count << kResetShift) | command_index;
    }
    static SequenceId DecodeSequenceId(TagIndex index) {
        SequenceId dt;
        dt.reset_count = index >> kResetShift;
        dt.seq_num = (index >> kCommandShift) & kCommandMask;
        dt.sub_command = (index & 1);
        return dt;
    }
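    // Worked example (illustrative): with kResetShift = 33, kCommandShift = 1, and kSubCommandMask = 1,
    //   EncodeSequenceId(/*reset_count*/ 2, /*command_num*/ 7, /*sub_command*/ 1) == (2ull << 33) | (7 << 1) | 1
    // and DecodeSequenceId() of that value returns {reset_count = 2, seq_num = 7, sub_command = 1}.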

    ResourceUsageTag &operator++() {
        index++;
        return *this;
    }
    bool IsBefore(const ResourceUsageTag &rhs) const { return index < rhs.index; }
    bool IsGloballyBefore(const ResourceUsageTag &rhs) const {
        return (submit_index < rhs.submit_index) ||
               ((submit_index == rhs.submit_index) &&
                ((index < rhs.index) || ((index == rhs.index) && (secondary_index < rhs.secondary_index))));
    }
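    // Example for IsGloballyBefore() (illustrative): any tag with submit_index 3 is globally before any
    // tag with submit_index 4, regardless of the per-command-buffer index values; tags on unsubmitted
    // primaries (kUnsubmittedPrimary) therefore sort after every submitted tag.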
    bool operator==(const ResourceUsageTag &rhs) const {
        return (submit_index == rhs.submit_index) && (index == rhs.index) && (secondary_index == rhs.secondary_index);
    }
    bool operator!=(const ResourceUsageTag &rhs) const { return !(*this == rhs); }

    CMD_TYPE GetCommand() const { return command; }
    SequenceId GetSequenceId() const { return DecodeSequenceId(index); }

    ResourceUsageTag NextSubCommand() const {
        assert((index & 1) == 0);
        ResourceUsageTag next = *this;
        next.index++;
        return next;
    }

    ResourceUsageTag()
        : submit_index(kUnsubmittedPrimary), index(0), secondary_index(0), command(CMD_NONE), secondary_command(CMD_NONE) {}
    ResourceUsageTag(uint64_t index_, CMD_TYPE command_, bool is_primary = true)
        : submit_index(is_primary ? kUnsubmittedPrimary : kUnexecutedSecondary),
          index(index_),
          secondary_index(0),
          command(command_),
          secondary_command(CMD_NONE) {}

    ResourceUsageTag(uint32_t reset_count, uint32_t command_num, CMD_TYPE command_, bool is_primary = true)
        : submit_index(is_primary ? kUnsubmittedPrimary : kUnexecutedSecondary),
          index(EncodeSequenceId(reset_count, command_num, 0)),
          secondary_index(0),
          command(command_),
          secondary_command(CMD_NONE) {}
};

struct HazardResult {
    std::unique_ptr<const ResourceAccessState> access_state;
    SyncStageAccessIndex usage_index = std::numeric_limits<SyncStageAccessIndex>::max();
    SyncHazard hazard = NONE;
    SyncStageAccessFlags prior_access = 0U;  // TODO -- change to a NONE enum in ...Bits
    ResourceUsageTag tag = ResourceUsageTag();
    void Set(const ResourceAccessState *access_state_, SyncStageAccessIndex usage_index_, SyncHazard hazard_,
             const SyncStageAccessFlags &prior_, const ResourceUsageTag &tag_);
};

struct SyncExecScope {
    VkPipelineStageFlags mask_param;      // the xxxStageMask parameter passed by the caller
    VkPipelineStageFlags expanded_mask;   // all stage bits covered by any 'catch all bits' in the parameter (eg. ALL_GRAPHICS_BIT).
    VkPipelineStageFlags exec_scope;      // all earlier or later stages that would be affected by a barrier using this scope.
    SyncStageAccessFlags valid_accesses;  // all valid accesses that can be used with this scope.

    SyncExecScope() : mask_param(0), expanded_mask(0), exec_scope(0), valid_accesses(0) {}

    static SyncExecScope MakeSrc(VkQueueFlags queue_flags, VkPipelineStageFlags src_stage_mask);
    static SyncExecScope MakeDst(VkQueueFlags queue_flags, VkPipelineStageFlags dst_stage_mask);
};
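
// A minimal sketch of building the execution-scope pair for a barrier (illustrative only; queue_flags
// would come from the queue family of the recording command buffer):
//   SyncExecScope src = SyncExecScope::MakeSrc(queue_flags, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT);
//   SyncExecScope dst = SyncExecScope::MakeDst(queue_flags, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
//   SyncBarrier barrier(src, dst);  // see SyncBarrier below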

struct SyncBarrier {
    VkPipelineStageFlags src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    VkPipelineStageFlags dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
    SyncBarrier() = default;
    SyncBarrier(const SyncBarrier &other) = default;
    SyncBarrier &operator=(const SyncBarrier &) = default;

    SyncBarrier(const SyncExecScope &src, const SyncExecScope &dst);

    template <typename Barrier>
    SyncBarrier(const Barrier &barrier, const SyncExecScope &src, const SyncExecScope &dst);

    SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &barrier);

    void Merge(const SyncBarrier &other) {
        src_exec_scope |= other.src_exec_scope;
        src_access_scope |= other.src_access_scope;
        dst_exec_scope |= other.dst_exec_scope;
        dst_access_scope |= other.dst_access_scope;
    }
};
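
// Merge() is used when several dependencies apply to the same transition (e.g. multiple
// VkSubpassDependency2 entries between the same subpass pair); the result is the union of the
// individual scopes. Illustrative sketch only:
//   SyncBarrier combined = barriers.front();
//   for (size_t i = 1; i < barriers.size(); ++i) combined.Merge(barriers[i]);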

enum class AccessAddressType : uint32_t { kLinear = 0, kIdealized = 1, kMaxType = 1, kTypeCount = kMaxType + 1 };

struct SyncEventState {
    enum IgnoreReason { NotIgnored = 0, ResetWaitRace, SetRace, MissingStageBits };
    using EventPointer = std::shared_ptr<EVENT_STATE>;
    using ScopeMap = sparse_container::range_map<VkDeviceSize, bool>;
    EventPointer event;
    CMD_TYPE last_command;  // Only Event commands are valid here.
    CMD_TYPE unsynchronized_set;
    VkPipelineStageFlags barriers;
    SyncExecScope scope;
    ResourceUsageTag first_scope_tag;
    std::array<ScopeMap, static_cast<size_t>(AccessAddressType::kTypeCount)> first_scope;
    SyncEventState(const EventPointer &event_state)
        : event(event_state), last_command(CMD_NONE), unsynchronized_set(CMD_NONE), barriers(0U), scope() {}
    SyncEventState() : SyncEventState(EventPointer()) {}
    void ResetFirstScope();
    const ScopeMap &FirstScope(AccessAddressType address_type) const { return first_scope[static_cast<size_t>(address_type)]; }
    IgnoreReason IsIgnoredByWait(VkPipelineStageFlags srcStageMask) const;
    bool HasBarrier(VkPipelineStageFlags stageMask, VkPipelineStageFlags exec_scope) const;
};
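
// Illustrative example of an IgnoreReason (assumed scenario, not part of the interface): if an application
// records
//   vkCmdSetEvent(cb, event, VK_PIPELINE_STAGE_TRANSFER_BIT);
//   vkCmdWaitEvents(cb, 1, &event, /*srcStageMask*/ VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, ...);
// the wait's srcStageMask does not cover the stage mask used by the set, so IsIgnoredByWait() would report
// MissingStageBits and the event's first scope is not applied to the wait.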

// To represent ordering guarantees such as rasterization and store
struct SyncOrderingBarrier {
    VkPipelineStageFlags exec_scope;
    SyncStageAccessFlags access_scope;
    SyncOrderingBarrier() = default;
    SyncOrderingBarrier &operator=(const SyncOrderingBarrier &) = default;
};

class ResourceAccessState : public SyncStageAccess {
  protected:
    // Multiple read operations can be simultaneously (and independently) synchronized.
    // Given that only the second execution scope creates a dependency chain, we have to track each read,
    // but only up to one per pipeline stage (as another read from the *same* stage becomes the more
    // recent, and applicable, one for hazard detection).
    struct ReadState {
        VkPipelineStageFlagBits stage;  // The stage of this read
        SyncStageAccessFlags access;    // TODO: Change to FlagBits when we have a None bit enum
        // TODO: Revisit whether this needs to support multiple reads per stage
        VkPipelineStageFlags barriers;  // all applicable barriered stages
        ResourceUsageTag tag;
        VkPipelineStageFlags pending_dep_chain;  // Should be zero except during barrier application
        // Excluded from comparison
        ReadState() = default;
        ReadState(VkPipelineStageFlagBits stage_, SyncStageAccessFlags access_, VkPipelineStageFlags barriers_,
                  const ResourceUsageTag &tag_)
            : stage(stage_), access(access_), barriers(barriers_), tag(tag_), pending_dep_chain(0) {}
        bool operator==(const ReadState &rhs) const {
            bool same = (stage == rhs.stage) && (access == rhs.access) && (barriers == rhs.barriers) && (tag == rhs.tag);
            return same;
        }
        bool IsReadBarrierHazard(VkPipelineStageFlags src_exec_scope) const {
            // If the read stage is not in the src sync scope
            // *AND* not execution chained with an existing sync barrier (that's the "or" below)
            // then the barrier access is unsafe (R/W after R)
            return (src_exec_scope & (stage | barriers)) == 0;
        }
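        // IsReadBarrierHazard() worked example (illustrative): stage = VERTEX_SHADER with no chained
        // barriers, and a barrier whose src_exec_scope only covers TRANSFER: the scope neither includes
        // the read stage nor chains off a prior barrier, so the expression is 0 and the write-after-read
        // is flagged.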

        bool operator!=(const ReadState &rhs) const { return !(*this == rhs); }
        inline void Set(VkPipelineStageFlagBits stage_, SyncStageAccessFlags access_, VkPipelineStageFlags barriers_,
                        const ResourceUsageTag &tag_) {
            stage = stage_;
            access = access_;
            barriers = barriers_;
            tag = tag_;
            pending_dep_chain = 0;  // If this is a new read, we aren't applying a barrier set.
        }
    };

  public:
    HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, const SyncOrderingBarrier &ordering) const;

    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags source_exec_scope,
                                     const SyncStageAccessFlags &source_access_scope) const;
    HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index, const ResourceUsageTag &start_tag) const;
    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags source_exec_scope,
                                     const SyncStageAccessFlags &source_access_scope, const ResourceUsageTag &event_tag) const;

    void Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag);
    void SetWrite(const SyncStageAccessFlags &usage_bit, const ResourceUsageTag &tag);
    void Resolve(const ResourceAccessState &other);
    void ApplyBarriers(const std::vector<SyncBarrier> &barriers, bool layout_transition);
    void ApplyBarriers(const std::vector<SyncBarrier> &barriers, const ResourceUsageTag &tag);
    void ApplyBarrier(const SyncBarrier &barrier, bool layout_transition);
    void ApplyBarrier(const ResourceUsageTag &scope_tag, const SyncBarrier &barrier, bool layout_transition);
    void ApplyPendingBarriers(const ResourceUsageTag &tag);

    ResourceAccessState()
        : write_barriers(~SyncStageAccessFlags(0)),
          write_dependency_chain(0),
          write_tag(),
          last_write(0),
          input_attachment_read(false),
          last_read_stages(0),
          read_execution_barriers(0),
          pending_write_dep_chain(0),
          pending_layout_transition(false),
          pending_write_barriers(0) {}

    bool HasPendingState() const {
        return (0 != pending_layout_transition) || pending_write_barriers.any() || (0 != pending_write_dep_chain);
    }
    bool HasWriteOp() const { return last_write != 0; }
    bool operator==(const ResourceAccessState &rhs) const {
        bool same = (write_barriers == rhs.write_barriers) && (write_dependency_chain == rhs.write_dependency_chain) &&
                    (last_reads == rhs.last_reads) && (last_read_stages == rhs.last_read_stages) && (write_tag == rhs.write_tag) &&
                    (input_attachment_read == rhs.input_attachment_read) &&
                    (read_execution_barriers == rhs.read_execution_barriers);
        return same;
    }
    bool operator!=(const ResourceAccessState &rhs) const { return !(*this == rhs); }
    VkPipelineStageFlags GetReadBarriers(const SyncStageAccessFlags &usage) const;
    SyncStageAccessFlags GetWriteBarriers() const { return write_barriers; }
    bool InSourceScopeOrChain(VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope) const {
        return ReadInSourceScopeOrChain(src_exec_scope) || WriteInSourceScopeOrChain(src_exec_scope, src_access_scope);
    }

  private:
    static constexpr VkPipelineStageFlags kInvalidAttachmentStage = ~VkPipelineStageFlags(0);
    bool IsWriteHazard(SyncStageAccessFlags usage) const { return (usage & ~write_barriers).any(); }
    bool IsRAWHazard(VkPipelineStageFlagBits usage_stage, const SyncStageAccessFlags &usage) const;
    bool IsWriteBarrierHazard(VkPipelineStageFlags src_exec_scope, const SyncStageAccessFlags &src_access_scope) const {
        // If the previous write is *not* in the 1st access scope
        // *AND* the current barrier is not in the dependency chain
        // *AND* there is no prior memory barrier for the previous write in the dependency chain
        // then the barrier access is unsafe (R/W after W)
        return ((last_write & src_access_scope) == 0) && ((src_exec_scope & write_dependency_chain) == 0) && (write_barriers == 0);
    }
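    // IsWriteBarrierHazard() worked example (illustrative): last_write is a transfer write with no
    // barriers applied yet; a barrier whose srcAccessMask omits VK_ACCESS_TRANSFER_WRITE_BIT yields a
    // src_access_scope that misses last_write, and with no dependency chain established the function
    // returns true (the new access would race the prior write).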
    bool ReadInSourceScopeOrChain(VkPipelineStageFlags src_exec_scope) const {
        return (0 != (src_exec_scope & (last_read_stages | read_execution_barriers)));
    }
    bool WriteInSourceScopeOrChain(VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope) const {
        return (src_access_scope & last_write).any() || (write_dependency_chain & src_exec_scope);
    }

    static bool IsReadHazard(VkPipelineStageFlagBits stage, const VkPipelineStageFlags barriers) {
        return 0 != (stage & ~barriers);
    }
    static bool IsReadHazard(VkPipelineStageFlags stage_mask, const VkPipelineStageFlags barriers) {
        return stage_mask != (stage_mask & barriers);
    }

    bool IsReadHazard(VkPipelineStageFlagBits stage, const ReadState &read_access) const {
        return IsReadHazard(stage, read_access.barriers);
    }
    bool IsReadHazard(VkPipelineStageFlags stage_mask, const ReadState &read_access) const {
        return IsReadHazard(stage_mask, read_access.barriers);
    }
    VkPipelineStageFlags GetOrderedStages(const SyncOrderingBarrier &ordering) const;

    // TODO: Add a NONE (zero) enum to SyncStageAccessFlags for input_attachment_read and last_write

    // With reads, each must be "safe" relative to its prior write, so we need only
    // save the most recent write operation (as anything *transitively* unsafe would already
    // be included).
    SyncStageAccessFlags write_barriers;          // union of applicable barrier masks since last write
    VkPipelineStageFlags write_dependency_chain;  // initially zero, but accumulating the dstStages of barriers if they chain.
    ResourceUsageTag write_tag;
    SyncStageAccessFlags last_write;  // only the most recent write

    // TODO Input Attachment cleanup for multiple reads in a given stage
    // Tracks whether the fragment shader read is an input attachment read
    bool input_attachment_read;

    VkPipelineStageFlags last_read_stages;
    VkPipelineStageFlags read_execution_barriers;
    small_vector<ReadState, 3> last_reads;

    // Pending execution state to support independent parallel barriers
    VkPipelineStageFlags pending_write_dep_chain;
    bool pending_layout_transition;
    SyncStageAccessFlags pending_write_barriers;
};

using ResourceAccessRangeMap = sparse_container::range_map<VkDeviceSize, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;
using ResourceRangeMergeIterator = sparse_container::parallel_iterator<ResourceAccessRangeMap, const ResourceAccessRangeMap>;

class AccessContext {
  public:
    enum DetectOptions : uint32_t {
        kDetectPrevious = 1U << 0,
        kDetectAsync = 1U << 1,
        kDetectAll = (kDetectPrevious | kDetectAsync)
    };
    using MapArray = std::array<ResourceAccessRangeMap, static_cast<size_t>(AccessAddressType::kTypeCount)>;

    // WIP TODO WIP Multi-dep -- change track back to support barrier vector, not just last.
    struct TrackBack {
        std::vector<SyncBarrier> barriers;
        const AccessContext *context;
        TrackBack(const AccessContext *context_, VkQueueFlags queue_flags_,
                  const std::vector<const VkSubpassDependency2 *> &subpass_dependencies_)
            : barriers(), context(context_) {
            barriers.reserve(subpass_dependencies_.size());
            for (const VkSubpassDependency2 *dependency : subpass_dependencies_) {
                assert(dependency);
                barriers.emplace_back(queue_flags_, *dependency);
            }
        }

        TrackBack &operator=(const TrackBack &) = default;
        TrackBack() = default;
    };

    HazardResult DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;
    template <typename Detector>
    HazardResult DetectHazard(Detector &detector, const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
                              const VkOffset3D &offset, const VkExtent3D &extent, DetectOptions options) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, const SyncOrderingBarrier &ordering,
                              const VkOffset3D &offset, const VkExtent3D &extent) const;
    HazardResult DetectHazard(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
                              const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask = 0U) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          const SyncStageAccessFlags &src_access_scope,
                                          const VkImageSubresourceRange &subresource_range, const SyncEventState &sync_event,
                                          DetectOptions options) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          const SyncStageAccessFlags &src_access_scope,
                                          const VkImageSubresourceRange &subresource_range, DetectOptions options) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          const SyncStageAccessFlags &src_stage_accesses,
                                          const VkImageMemoryBarrier &barrier) const;
    HazardResult DetectSubpassTransitionHazard(const TrackBack &track_back, const IMAGE_VIEW_STATE *attach_view) const;

    void RecordLayoutTransitions(const RENDER_PASS_STATE &rp_state, uint32_t subpass,
                                 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const ResourceUsageTag &tag);

    const TrackBack &GetDstExternalTrackBack() const { return dst_external_; }
    void Reset() {
        prev_.clear();
        prev_by_subpass_.clear();
        async_.clear();
        src_external_ = TrackBack();
        dst_external_ = TrackBack();
        start_tag_ = ResourceUsageTag();
        for (auto &map : access_state_maps_) {
            map.clear();
        }
    }

    // Follow the previous contexts to access the access state, supporting "lazy" import into this context. Not intended for
    // subpass layout transitions, as the pending state handling is more complex.
    // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead.
    // Would need to add a "hint" overload to the parallel_iterator::invalidate_[AB] call, if so.
    void ResolvePreviousAccess(AccessAddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state) const;
    void ResolvePreviousAccesses();
    template <typename BarrierAction>
    void ResolveAccessRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range,
                            BarrierAction &barrier_action, AccessAddressType address_type, ResourceAccessRangeMap *descent_map,
                            const ResourceAccessState *infill_state) const;
    template <typename BarrierAction>
    void ResolveAccessRange(AccessAddressType type, const ResourceAccessRange &range, BarrierAction &barrier_action,
                            ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
                            bool recur_to_infill = true) const;

    void UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const VkOffset3D &offset,
                           const VkExtent3D &extent, VkImageAspectFlags aspect_mask, const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    void UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                       const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                       const ResourceUsageTag &tag);
    void UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                     const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                     const ResourceUsageTag &tag);

    void ResolveChildContexts(const std::vector<AccessContext> &contexts);

    template <typename Action>
    void UpdateResourceAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action);
    template <typename Action>
    void UpdateResourceAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range, const Action action);

    template <typename Action>
    void ApplyGlobalBarriers(const Action &barrier_action);
    static AccessAddressType ImageAddressType(const IMAGE_STATE &image);

    AccessContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
                  const std::vector<AccessContext> &contexts, const AccessContext *external_context);

    AccessContext() { Reset(); }
    AccessContext(const AccessContext &copy_from) = default;

    ResourceAccessRangeMap &GetAccessStateMap(AccessAddressType type) { return access_state_maps_[static_cast<size_t>(type)]; }
    const ResourceAccessRangeMap &GetAccessStateMap(AccessAddressType type) const {
        return access_state_maps_[static_cast<size_t>(type)];
    }
    ResourceAccessRangeMap &GetLinearMap() { return GetAccessStateMap(AccessAddressType::kLinear); }
    const ResourceAccessRangeMap &GetLinearMap() const { return GetAccessStateMap(AccessAddressType::kLinear); }
    ResourceAccessRangeMap &GetIdealizedMap() { return GetAccessStateMap(AccessAddressType::kIdealized); }
    const ResourceAccessRangeMap &GetIdealizedMap() const { return GetAccessStateMap(AccessAddressType::kIdealized); }
    const TrackBack *GetTrackBackFromSubpass(uint32_t subpass) const {
        if (subpass == VK_SUBPASS_EXTERNAL) {
            return &src_external_;
        } else {
            assert(subpass < prev_by_subpass_.size());
            return prev_by_subpass_[subpass];
        }
    }

    bool ValidateLayoutTransitions(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                   const VkRect2D &render_area, uint32_t subpass,
                                   const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name) const;
    bool ValidateLoadOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                               uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                               const char *func_name) const;
    bool ValidateStoreOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                const char *func_name) const;
    bool ValidateResolveOperations(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                   const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
                                   uint32_t subpass) const;

    void SetStartTag(const ResourceUsageTag &tag) { start_tag_ = tag; }
    template <typename Action>
    void ForAll(Action &&action);

  private:
    template <typename Detector>
    HazardResult DetectHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range,
                              DetectOptions options) const;
    template <typename Detector>
    HazardResult DetectAsyncHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectPreviousHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    void UpdateAccessState(AccessAddressType type, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);

    MapArray access_state_maps_;
    std::vector<TrackBack> prev_;
    std::vector<TrackBack *> prev_by_subpass_;
    std::vector<const AccessContext *> async_;
    TrackBack src_external_;
    TrackBack dst_external_;
    ResourceUsageTag start_tag_;
};

class RenderPassAccessContext {
  public:
    RenderPassAccessContext() : rp_state_(nullptr), current_subpass_(0) {}

    bool ValidateDrawSubpassAttachment(const SyncValidator &sync_state, const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area,
                                       const char *func_name) const;
    void RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area, const ResourceUsageTag &tag);
    bool ValidateNextSubpass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *command_name) const;
    bool ValidateEndRenderPass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *func_name) const;
    bool ValidateFinalSubpassLayoutTransitions(const SyncValidator &sync_state, const VkRect2D &render_area,
                                               const char *func_name) const;

    void RecordLayoutTransitions(const ResourceUsageTag &tag);
    void RecordLoadOperations(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordBeginRenderPass(const SyncValidator &state, const CMD_BUFFER_STATE &cb_state, const AccessContext *external_context,
                               VkQueueFlags queue_flags, const ResourceUsageTag &tag);
    void RecordNextSubpass(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordEndRenderPass(AccessContext *external_context, const VkRect2D &render_area, const ResourceUsageTag &tag);

    AccessContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
    const AccessContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
    const std::vector<AccessContext> &GetContexts() const { return subpass_contexts_; }
    uint32_t GetCurrentSubpass() const { return current_subpass_; }
    const RENDER_PASS_STATE *GetRenderPassState() const { return rp_state_; }
    AccessContext *CreateStoreResolveProxy(const VkRect2D &render_area) const;

  private:
    const RENDER_PASS_STATE *rp_state_;
    uint32_t current_subpass_;
    std::vector<AccessContext> subpass_contexts_;
    std::vector<const IMAGE_VIEW_STATE *> attachment_views_;
};

class CommandBufferAccessContext {
  public:
    CommandBufferAccessContext()
        : command_number_(0),
          reset_count_(0),
          render_pass_contexts_(),
          cb_access_context_(),
          current_context_(&cb_access_context_),
          current_renderpass_context_(),
          cb_state_(),
          queue_flags_() {}
    CommandBufferAccessContext(SyncValidator &sync_validator, std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
        : CommandBufferAccessContext() {
        cb_state_ = cb_state;
        sync_state_ = &sync_validator;
        queue_flags_ = queue_flags;
    }

    void Reset() {
        command_number_ = 0;
        reset_count_++;
        cb_access_context_.Reset();
        render_pass_contexts_.clear();
        current_context_ = &cb_access_context_;
        current_renderpass_context_ = nullptr;
        event_state_.clear();
    }

    AccessContext *GetCurrentAccessContext() { return current_context_; }
    const AccessContext *GetCurrentAccessContext() const { return current_context_; }
    void RecordBeginRenderPass(const ResourceUsageTag &tag);
    void ApplyBufferBarriers(const SyncEventState &sync_event, const SyncExecScope &dst, uint32_t barrier_count,
                             const VkBufferMemoryBarrier *barriers);
    void ApplyGlobalBarriers(SyncEventState &sync_event, const SyncExecScope &dst, uint32_t memory_barrier_count,
                             const VkMemoryBarrier *pMemoryBarriers, const ResourceUsageTag &tag);
    void ApplyGlobalBarriersToEvents(const SyncExecScope &src, const SyncExecScope &dst);
    void ApplyImageBarriers(const SyncEventState &sync_event, const SyncExecScope &dst, uint32_t barrier_count,
                            const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag);
    bool ValidateBeginRenderPass(const RENDER_PASS_STATE &render_pass, const VkRenderPassBeginInfo *pRenderPassBegin,
                                 const VkSubpassBeginInfo *pSubpassBeginInfo, const char *func_name) const;
    bool ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const char *func_name) const;
    void RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const ResourceUsageTag &tag);
    bool ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const char *func_name) const;
    void RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const ResourceUsageTag &tag);
    bool ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const char *func_name) const;
    void RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const ResourceUsageTag &tag);
    bool ValidateDrawSubpassAttachment(const char *func_name) const;
    void RecordDrawSubpassAttachment(const ResourceUsageTag &tag);
    bool ValidateNextSubpass(const char *func_name) const;
    bool ValidateEndRenderpass(const char *func_name) const;
    void RecordNextSubpass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);
    void RecordEndRenderPass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);

    bool ValidateSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const;
    void RecordSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask, const ResourceUsageTag &tag);
    bool ValidateResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const;
    void RecordResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
    bool ValidateWaitEvents(uint32_t eventCount, const VkEvent *pEvents, VkPipelineStageFlags srcStageMask,
                            VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                            uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                            uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) const;
    void RecordWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                          VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount,
                          const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                          const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                          const VkImageMemoryBarrier *pImageMemoryBarriers, const ResourceUsageTag &tag);
    void RecordDestroyEvent(VkEvent event);

    CMD_BUFFER_STATE *GetCommandBufferState() { return cb_state_.get(); }
    const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
    VkQueueFlags GetQueueFlags() const { return queue_flags_; }
    inline ResourceUsageTag NextCommandTag(CMD_TYPE command) {
        // TODO: add command encoding to ResourceUsageTag.
        // What else do we want to include? Some sort of "parent" or global sequence number?
        command_number_++;
        // The lowest bit is a sub-command number used to separate operations at the end of the previous
        // subpass from the start of the new one in vkCmdNextSubpass().
        ResourceUsageTag next(reset_count_, command_number_, command);
        return next;
    }
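    // Typical use (illustrative sketch; the local variable names here are hypothetical): a vkCmd* record
    // hook requests a fresh tag and stamps every access it records with it, e.g.
    //   const auto tag = cb_context->NextCommandTag(CMD_COPYBUFFER);
    //   access_context->UpdateAccessState(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range, tag);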

  private:
    SyncEventState *GetEventState(VkEvent);
    const SyncEventState *GetEventState(VkEvent) const;
    uint32_t command_number_;
    uint32_t reset_count_;
    std::vector<RenderPassAccessContext> render_pass_contexts_;
    AccessContext cb_access_context_;
    AccessContext *current_context_;
    RenderPassAccessContext *current_renderpass_context_;
    std::shared_ptr<CMD_BUFFER_STATE> cb_state_;
    SyncValidator *sync_state_;

    VkQueueFlags queue_flags_;
    std::unordered_map<VkEvent, std::unique_ptr<SyncEventState>> event_state_;
};

class SyncValidator : public ValidationStateTracker, public SyncStageAccess {
  public:
    SyncValidator() { container_type = LayerObjectTypeSyncValidation; }
    using StateTracker = ValidationStateTracker;

    using StateTracker::AccessorTraitsTypes;
    std::unordered_map<VkCommandBuffer, std::unique_ptr<CommandBufferAccessContext>> cb_access_state;
    CommandBufferAccessContext *GetAccessContextImpl(VkCommandBuffer command_buffer, bool do_insert) {
        auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            if (!do_insert) return nullptr;
            // If we don't have one, make it.
            auto cb_state = GetShared<CMD_BUFFER_STATE>(command_buffer);
            assert(cb_state.get());
            auto queue_flags = GetQueueFlags(*cb_state);
            std::unique_ptr<CommandBufferAccessContext> context(new CommandBufferAccessContext(*this, cb_state, queue_flags));
            auto insert_pair = cb_access_state.insert(std::make_pair(command_buffer, std::move(context)));
            found_it = insert_pair.first;
        }
        return found_it->second.get();
    }
    CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, true);  // true -> do_insert on not found
    }
    CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, false);  // false -> don't do_insert on not found
    }

    const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const {
        const auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            return nullptr;
        }
        return found_it->second.get();
    }

    void ApplyGlobalBarriers(AccessContext *context, const SyncExecScope &src, const SyncExecScope &dst,
                             uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, const ResourceUsageTag &tag);

    void ApplyBufferBarriers(AccessContext *context, const SyncExecScope &src, const SyncExecScope &dst, uint32_t barrier_count,
                             const VkBufferMemoryBarrier *barriers);

    void ApplyImageBarriers(AccessContext *context, const SyncExecScope &src, const SyncExecScope &dst, uint32_t barrier_count,
                            const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag);

    void ResetCommandBufferCallback(VkCommandBuffer command_buffer);
    void FreeCommandBufferCallback(VkCommandBuffer command_buffer);
    void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                  const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE command);
    void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                              const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command);
    void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command);
    bool SupressedBoundDescriptorWAW(const HazardResult &hazard) const;
John Zulauf9cb530d2019-09-30 14:14:10 -0600740
741 void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600742 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) override;
John Zulauf9cb530d2019-09-30 14:14:10 -0600743
John Zulauf355e49b2020-04-24 15:11:15 -0600744 bool ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -0800745 const VkSubpassBeginInfo *pSubpassBeginInfo, const char *func_name) const;
John Zulauf355e49b2020-04-24 15:11:15 -0600746
747 bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600748 VkSubpassContents contents) const override;
John Zulauf355e49b2020-04-24 15:11:15 -0600749
750 bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -0800751 const VkSubpassBeginInfo *pSubpassBeginInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -0600752
753 bool PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -0800754 const VkSubpassBeginInfo *pSubpassBeginInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -0600755
John Zulauf9cb530d2019-09-30 14:14:10 -0600756 bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600757 const VkBufferCopy *pRegions) const override;
John Zulauf9cb530d2019-09-30 14:14:10 -0600758
759 void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600760 const VkBufferCopy *pRegions) override;
John Zulauf9cb530d2019-09-30 14:14:10 -0600761
John Zulauf4a6105a2020-11-17 15:11:05 -0700762 void PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) override;
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600763 bool PreCallValidateCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -0400764
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600765 void PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) override;
Jeff Leger178b1e52020-10-05 12:22:23 -0400766
    bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageCopy *pRegions) const override;

    void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions) override;

    bool PreCallValidateCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) const override;

    void PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) override;
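
    // vkCmdPipelineBarrier validation and recording.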
    bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                           VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                           uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                           uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                           uint32_t imageMemoryBarrierCount,
                                           const VkImageMemoryBarrier *pImageMemoryBarriers) const override;

    void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                         VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                         uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                         uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                         uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) override;
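
    // Post-call recording for command buffer begin and the render pass begin variants.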
    void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
                                          VkResult result) override;

    void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                          VkSubpassContents contents) override;
    void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                           const VkSubpassBeginInfo *pSubpassBeginInfo) override;
    void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                              const VkSubpassBeginInfo *pSubpassBeginInfo) override;
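
    // vkCmdNextSubpass and the 2 / 2KHR variants.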
    bool ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                const VkSubpassEndInfo *pSubpassEndInfo, const char *func_name) const;
    bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const override;
    bool PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                        const VkSubpassEndInfo *pSubpassEndInfo) const override;
    bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                           const VkSubpassEndInfo *pSubpassEndInfo) const override;

    void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) override;
    void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                       const VkSubpassEndInfo *pSubpassEndInfo) override;
    void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                          const VkSubpassEndInfo *pSubpassEndInfo) override;
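
    // vkCmdEndRenderPass and the 2 / 2KHR variants.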
    bool ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo,
                                  const char *func_name) const;
    bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const override;
    bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const override;
    bool PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const override;

    void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) override;
    void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) override;
    void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) override;
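
    // Buffer-to-image copies. The templated helpers are parameterized on the region type
    // (VkBufferImageCopy vs. VkBufferImageCopy2KHR) so that, presumably, the core and
    // VK_KHR_copy_commands2 entry points can share one validate/record implementation, with
    // CopyCommandVersion selecting how the call is reported. (Intent inferred from the
    // signatures below; the actual forwarding lives in the .cpp.)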
    template <typename BufferImageCopyRegionType>
    bool ValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                      VkImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                      CopyCommandVersion version) const;
    bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                             VkImageLayout dstImageLayout, uint32_t regionCount,
                                             const VkBufferImageCopy *pRegions) const override;
    bool PreCallValidateCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
                                                 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) const override;

    template <typename BufferImageCopyRegionType>
    void RecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                    VkImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                    CopyCommandVersion version);
    void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                           VkImageLayout dstImageLayout, uint32_t regionCount,
                                           const VkBufferImageCopy *pRegions) override;
    void PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
                                               const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) override;
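
    // Image-to-buffer copies, mirroring the buffer-to-image helpers above.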
    template <typename BufferImageCopyRegionType>
    bool ValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                      VkBuffer dstBuffer, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                      CopyCommandVersion version) const;
    bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                             VkBuffer dstBuffer, uint32_t regionCount,
                                             const VkBufferImageCopy *pRegions) const override;
    bool PreCallValidateCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
                                                 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) const override;

    template <typename BufferImageCopyRegionType>
    void RecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                    VkBuffer dstBuffer, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                    CopyCommandVersion version);
    void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                           VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) override;
    void PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
                                               const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) override;
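
    // vkCmdBlitImage and its VK_KHR_copy_commands2 variant, sharing templated validate/record helpers.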
    template <typename RegionType>
    bool ValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                              VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter,
                              const char *apiName) const;

    bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageBlit *pRegions, VkFilter filter) const override;
    bool PreCallValidateCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) const override;

    template <typename RegionType>
    void RecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                            VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter,
                            ResourceUsageTag tag);
    void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions,
                                   VkFilter filter) override;
    void PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) override;
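
    // Shared helpers for the indirect-argument and count buffers read by indirect draw/dispatch commands.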
    bool ValidateIndirectBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, const VkDeviceSize struct_size,
                                const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, const uint32_t stride,
                                const char *function) const;
    void RecordIndirectBuffer(AccessContext &context, const ResourceUsageTag &tag, const VkDeviceSize struct_size,
                              const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, uint32_t stride);

    bool ValidateCountBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                             const char *function) const;
    void RecordCountBuffer(AccessContext &context, const ResourceUsageTag &tag, VkBuffer buffer, VkDeviceSize offset);
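
    // Dispatch and draw entry points. A minimal sketch (an assumption, not taken from this header) of how an
    // indirect draw hook is expected to use the helpers above:
    //   PreCallValidateCmdDrawIndirect(...) would call something like
    //       ValidateIndirectBuffer(context, commandBuffer, sizeof(VkDrawIndirectCommand), buffer, offset,
    //                              drawCount, stride, "vkCmdDrawIndirect");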
    bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const override;
    void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) override;

    bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const override;
    void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) override;

    bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
                                uint32_t firstInstance) const override;
    void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
                              uint32_t firstInstance) override;

    bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
                                       uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const override;
    void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
                                     uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) override;

    bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
                                        uint32_t stride) const override;
    void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
                                      uint32_t stride) override;

    bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                               uint32_t drawCount, uint32_t stride) const override;
    void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             uint32_t drawCount, uint32_t stride) override;

    bool ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer,
                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride,
                                      const char *function) const;
    bool PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                             uint32_t stride) const override;
    void PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                           VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                           uint32_t stride) override;
    bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                uint32_t stride) const override;
    void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                              VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                              uint32_t stride) override;
    bool PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                uint32_t stride) const override;
    void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                              VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                              uint32_t stride) override;

    bool ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                             uint32_t stride, const char *function) const;
    bool PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                    VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                    uint32_t stride) const override;
    void PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                  VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                  uint32_t stride) override;
    bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                       VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                       uint32_t stride) const override;
    void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                     VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                     uint32_t stride) override;
    bool PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                       VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                       uint32_t stride) const override;
    void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                     VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                     uint32_t stride) override;
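
    // Clear commands (color and depth/stencil).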
    bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                           const VkClearColorValue *pColor, uint32_t rangeCount,
                                           const VkImageSubresourceRange *pRanges) const override;
    void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                         const VkClearColorValue *pColor, uint32_t rangeCount,
                                         const VkImageSubresourceRange *pRanges) override;

    bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                                  const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
                                                  const VkImageSubresourceRange *pRanges) const override;
    void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                                const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
                                                const VkImageSubresourceRange *pRanges) override;
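
    // Query pool result copies and buffer fills.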
    bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
                                                uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                VkDeviceSize stride, VkQueryResultFlags flags) const override;
    void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
                                              uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
                                              VkQueryResultFlags flags) override;

    bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
                                      uint32_t data) const override;
    void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
                                    uint32_t data) override;
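
    // vkCmdResolveImage and its VK_KHR_copy_commands2 variant.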
    bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                        VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                        const VkImageResolve *pRegions) const override;

    void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                      VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                      const VkImageResolve *pRegions) override;

    bool PreCallValidateCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
                                            const VkResolveImageInfo2KHR *pResolveImageInfo) const override;
    void PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR *pResolveImageInfo) override;
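
    // Inline buffer updates and AMD buffer markers.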
    bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                        VkDeviceSize dataSize, const void *pData) const override;
    void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                      VkDeviceSize dataSize, const void *pData) override;

    bool PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const override;
    void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                              VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) override;
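
    // Event commands: vkCmdSetEvent / vkCmdResetEvent / vkCmdWaitEvents.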
    bool PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const override;
    void PostCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) override;

    bool PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const override;
    void PostCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) override;

    bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                      VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                      uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                      uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                      uint32_t imageMemoryBarrierCount,
                                      const VkImageMemoryBarrier *pImageMemoryBarriers) const override;
    void PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                     VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                     uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                     uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                     uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) override;
};