/*
 * Copyright (c) 2019-2021 Valve Corporation
 * Copyright (c) 2019-2021 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 * Author: Locke Lin <locke@lunarg.com>
 * Author: Jeremy Gebben <jeremyg@lunarg.com>
 */

#pragma once

#include <limits>
#include <map>
#include <memory>
#include <unordered_map>
#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"

class AccessContext;
class CommandBufferAccessContext;
using CommandBufferAccessContextShared = std::shared_ptr<CommandBufferAccessContext>;
class ResourceAccessState;
class SyncValidator;

enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};

enum class SyncOrdering : uint8_t {
    kNonAttachment = 0,
    kColorAttachment = 1,
    kDepthStencilAttachment = 2,
    kRaster = 3,
    kNumOrderings = 4,
};

// Useful utilities for manipulating StageAccess parameters, suitable as a base class to save typing
struct SyncStageAccess {
    static inline SyncStageAccessFlags FlagBit(SyncStageAccessIndex stage_access) {
        return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
    }
    static inline SyncStageAccessFlags Flags(SyncStageAccessIndex stage_access) {
        return static_cast<SyncStageAccessFlags>(FlagBit(stage_access));
    }

    static bool IsRead(const SyncStageAccessFlags &stage_access_bit) { return (stage_access_bit & syncStageAccessReadMask).any(); }
    static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }

    static bool IsWrite(const SyncStageAccessFlags &stage_access_bit) {
        return (stage_access_bit & syncStageAccessWriteMask).any();
    }
    static bool HasWrite(const SyncStageAccessFlags &stage_access_mask) {
        return (stage_access_mask & syncStageAccessWriteMask).any();
    }
    static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
    static VkPipelineStageFlagBits PipelineStageBit(SyncStageAccessIndex stage_access_index) {
        return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
    }
    static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags stages);
    static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(VkPipelineStageFlags stages, VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(const SyncStageAccessFlags &stage_scope, VkAccessFlags accesses) {
        return stage_scope & AccessScopeByAccess(accesses);
    }
};
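
// Illustrative sketch (hypothetical helper, not part of the validator): a barrier's access scope
// is the intersection of the accesses implied by its stage mask with the accesses named in its
// access mask, which is what AccessScope() above computes.
inline bool ExampleBarrierScopeHasWrite(VkPipelineStageFlags stages, VkAccessFlags accesses) {
    const SyncStageAccessFlags scope = SyncStageAccess::AccessScope(stages, accesses);
    return SyncStageAccess::HasWrite(scope);
}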

// The resource tag is relative to the command buffer or queue in which it's found
struct ResourceUsageTag {
    using TagIndex = uint64_t;
    using Count = uint32_t;
    constexpr static TagIndex kMaxIndex = std::numeric_limits<TagIndex>::max();
    constexpr static uint32_t kMaxCount = std::numeric_limits<Count>::max();

    TagIndex index = 0U;  // the index of the command within the command buffer itself (primary or secondary)
    CMD_TYPE command = CMD_NONE;
    Count seq_num = 0U;
    Count sub_command = 0U;

    bool operator<(const ResourceUsageTag &rhs) const { return index < rhs.index; }
    bool IsBefore(const ResourceUsageTag &rhs) const { return index < rhs.index; }
    bool operator==(const ResourceUsageTag &rhs) const { return (index == rhs.index); }
    bool operator!=(const ResourceUsageTag &rhs) const { return !(*this == rhs); }

    ResourceUsageTag() = default;
    ResourceUsageTag(TagIndex index_, Count seq_num_, Count sub_command_, CMD_TYPE command_)
        : index(index_), command(command_), seq_num(seq_num_), sub_command(sub_command_) {}
};
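
// Illustrative sketch (hypothetical helper): tags order strictly by index, so an access recorded
// by a later command in the same command buffer always compares as more recent than one recorded
// earlier, independent of the command type, sequence number, or sub-command.
inline bool ExampleTagRecordedBefore(const ResourceUsageTag &earlier, const ResourceUsageTag &later) {
    return earlier.IsBefore(later);  // equivalent to earlier.index < later.index
}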

struct HazardResult {
    std::unique_ptr<const ResourceAccessState> access_state;
    SyncStageAccessIndex usage_index = std::numeric_limits<SyncStageAccessIndex>::max();
    SyncHazard hazard = NONE;
    SyncStageAccessFlags prior_access = 0U;  // TODO -- change to a NONE enum in ...Bits
    ResourceUsageTag tag = ResourceUsageTag();
    void Set(const ResourceAccessState *access_state_, SyncStageAccessIndex usage_index_, SyncHazard hazard_,
             const SyncStageAccessFlags &prior_, const ResourceUsageTag &tag_);
};

struct SyncExecScope {
    VkPipelineStageFlags mask_param;      // the xxxStageMask parameter passed by the caller
    VkPipelineStageFlags expanded_mask;   // all stage bits covered by any 'catch all bits' in the parameter (e.g. ALL_GRAPHICS_BIT).
    VkPipelineStageFlags exec_scope;      // all earlier or later stages that would be affected by a barrier using this scope.
    SyncStageAccessFlags valid_accesses;  // all valid accesses that can be used with this scope.

    SyncExecScope() : mask_param(0), expanded_mask(0), exec_scope(0), valid_accesses(0) {}

    static SyncExecScope MakeSrc(VkQueueFlags queue_flags, VkPipelineStageFlags src_stage_mask);
    static SyncExecScope MakeDst(VkQueueFlags queue_flags, VkPipelineStageFlags dst_stage_mask);
};
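
// Illustrative sketch (hypothetical helper): MakeSrc/MakeDst expand the application-provided stage
// mask for the given queue capabilities, so a scope also covers stages that are only logically
// earlier (for src) or later (for dst) than the stages named explicitly.
inline bool ExampleScopeCoversStage(const SyncExecScope &scope, VkPipelineStageFlagBits stage) {
    return (scope.exec_scope & stage) != 0;
}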

struct SyncBarrier {
    VkPipelineStageFlags src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    VkPipelineStageFlags dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
    SyncBarrier() = default;
    SyncBarrier(const SyncBarrier &other) = default;
    SyncBarrier &operator=(const SyncBarrier &) = default;

    SyncBarrier(const SyncExecScope &src, const SyncExecScope &dst);

    template <typename Barrier>
    SyncBarrier(const Barrier &barrier, const SyncExecScope &src, const SyncExecScope &dst);

    SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &barrier);

    void Merge(const SyncBarrier &other) {
        src_exec_scope |= other.src_exec_scope;
        src_access_scope |= other.src_access_scope;
        dst_exec_scope |= other.dst_exec_scope;
        dst_access_scope |= other.dst_access_scope;
    }
};
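
// Illustrative sketch (hypothetical stage choices, not part of the validator): building a
// SyncBarrier from expanded source and destination scopes, then merging in a second dependency
// the way multiple subpass dependencies between the same pair of subpasses are folded into a
// single effective barrier.
inline SyncBarrier ExampleMergedBarrier(VkQueueFlags queue_flags) {
    SyncBarrier merged(SyncExecScope::MakeSrc(queue_flags, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT),
                       SyncExecScope::MakeDst(queue_flags, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT));
    const SyncBarrier other(SyncExecScope::MakeSrc(queue_flags, VK_PIPELINE_STAGE_TRANSFER_BIT),
                            SyncExecScope::MakeDst(queue_flags, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT));
    merged.Merge(other);  // execution and access scopes are unioned
    return merged;
}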

enum class AccessAddressType : uint32_t { kLinear = 0, kIdealized = 1, kMaxType = 1, kTypeCount = kMaxType + 1 };

struct SyncEventState {
    enum IgnoreReason { NotIgnored = 0, ResetWaitRace, SetRace, MissingStageBits };
    using EventPointer = std::shared_ptr<const EVENT_STATE>;
    using ScopeMap = sparse_container::range_map<VkDeviceSize, bool>;
    EventPointer event;
    CMD_TYPE last_command;  // Only Event commands are valid here.
    CMD_TYPE unsynchronized_set;
    VkPipelineStageFlags barriers;
    SyncExecScope scope;
    ResourceUsageTag first_scope_tag;
    bool destroyed;
    std::array<ScopeMap, static_cast<size_t>(AccessAddressType::kTypeCount)> first_scope;
    template <typename EventPointerType>
    SyncEventState(EventPointerType &&event_state)
        : event(std::forward<EventPointerType>(event_state)),
          last_command(CMD_NONE),
          unsynchronized_set(CMD_NONE),
          barriers(0U),
          scope(),
          first_scope_tag(),
          destroyed((event_state.get() == nullptr) || event_state->destroyed) {}
    SyncEventState() : SyncEventState(EventPointer()) {}
    void ResetFirstScope();
    const ScopeMap &FirstScope(AccessAddressType address_type) const { return first_scope[static_cast<size_t>(address_type)]; }
    IgnoreReason IsIgnoredByWait(VkPipelineStageFlags srcStageMask) const;
    bool HasBarrier(VkPipelineStageFlags stageMask, VkPipelineStageFlags exec_scope) const;
};
using SyncEventStateShared = std::shared_ptr<SyncEventState>;
using SyncEventStateConstShared = std::shared_ptr<const SyncEventState>;
class SyncEventsContext {
  public:
    using Map = std::unordered_map<const EVENT_STATE *, SyncEventStateShared>;
    using iterator = Map::iterator;
    using const_iterator = Map::const_iterator;

    SyncEventState *GetFromShared(const SyncEventState::EventPointer &event_state) {
        const auto find_it = map_.find(event_state.get());
        if (find_it == map_.end()) {
            if (!event_state.get()) return nullptr;

            const auto *event_plain_ptr = event_state.get();
            auto sync_state = SyncEventStateShared(new SyncEventState(event_state));
            auto insert_pair = map_.insert(std::make_pair(event_plain_ptr, std::move(sync_state)));
            return insert_pair.first->second.get();
        }
        return find_it->second.get();
    }

    const SyncEventState *Get(const EVENT_STATE *event_state) const {
        const auto find_it = map_.find(event_state);
        if (find_it == map_.end()) {
            return nullptr;
        }
        return find_it->second.get();
    }
    const SyncEventState *Get(const SyncEventState::EventPointer &event_state) const { return Get(event_state.get()); }

    // stl style naming for range-for support
    inline iterator begin() { return map_.begin(); }
    inline const_iterator begin() const { return map_.begin(); }
    inline iterator end() { return map_.end(); }
    inline const_iterator end() const { return map_.end(); }

    void Destroy(const EVENT_STATE *event_state) {
        auto sync_it = map_.find(event_state);
        if (sync_it != map_.end()) {
            sync_it->second->destroyed = true;
            map_.erase(sync_it);
        }
    }
    void Clear() { map_.clear(); }

  private:
    Map map_;
};
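
// Illustrative sketch (hypothetical helper, not part of the validator): per command buffer, sync
// event state is looked up (and lazily created) from the EVENT_STATE tracked by the core state
// tracker, keyed by the raw pointer so destroyed events can later be pruned with Destroy().
inline VkPipelineStageFlags ExampleEventScopeBarriers(SyncEventsContext &events,
                                                      const SyncEventState::EventPointer &event_state) {
    const SyncEventState *sync_event = events.GetFromShared(event_state);
    return sync_event ? sync_event->barriers : 0;
}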

// To represent ordering guarantees such as rasterization and store

class ResourceAccessState : public SyncStageAccess {
  protected:
    struct OrderingBarrier {
        VkPipelineStageFlags exec_scope;
        SyncStageAccessFlags access_scope;
        OrderingBarrier() = default;
        OrderingBarrier(VkPipelineStageFlags es, SyncStageAccessFlags as) : exec_scope(es), access_scope(as) {}
        OrderingBarrier &operator=(const OrderingBarrier &) = default;
    };
    using OrderingBarriers = std::array<OrderingBarrier, static_cast<size_t>(SyncOrdering::kNumOrderings)>;

    struct FirstAccess {
        ResourceUsageTag tag;
        SyncStageAccessIndex usage_index;
        SyncOrdering ordering_rule;
        FirstAccess(const ResourceUsageTag &tag_, SyncStageAccessIndex usage_index_, SyncOrdering ordering_rule_)
            : tag(tag_), usage_index(usage_index_), ordering_rule(ordering_rule_){};
        FirstAccess(const FirstAccess &other) = default;
        FirstAccess(FirstAccess &&other) = default;
        FirstAccess &operator=(const FirstAccess &rhs) = default;
        FirstAccess &operator=(FirstAccess &&rhs) = default;

        bool operator==(const FirstAccess &rhs) const {
            return (tag == rhs.tag) && (usage_index == rhs.usage_index) && (ordering_rule == rhs.ordering_rule);
        }
    };
    using FirstAccesses = small_vector<FirstAccess, 3>;

    // Multiple read operations can be simultaneously (and independently) synchronized.
    // Given that only the second execution scope creates a dependency chain, we have to track each,
    // but only up to one per pipeline stage (as another read from the *same* stage becomes the more
    // recent, and thus the applicable one, for hazard detection).
    struct ReadState {
        VkPipelineStageFlagBits stage;  // The stage of this read
        SyncStageAccessFlags access;    // TODO: Change to FlagBits when we have a None bit enum
        // TODO: Revisit whether this needs to support multiple reads per stage
        VkPipelineStageFlags barriers;  // all applicable barriered stages
        ResourceUsageTag tag;
        VkPipelineStageFlags pending_dep_chain;  // Should be zero except during barrier application
        // Excluded from comparison
        ReadState() = default;
        ReadState(VkPipelineStageFlagBits stage_, SyncStageAccessFlags access_, VkPipelineStageFlags barriers_,
                  const ResourceUsageTag &tag_)
            : stage(stage_), access(access_), barriers(barriers_), tag(tag_), pending_dep_chain(0) {}
        bool operator==(const ReadState &rhs) const {
            bool same = (stage == rhs.stage) && (access == rhs.access) && (barriers == rhs.barriers) && (tag == rhs.tag);
            return same;
        }
        bool IsReadBarrierHazard(VkPipelineStageFlags src_exec_scope) const {
            // If the read stage is not in the src sync scope
            // *AND* not execution chained with an existing sync barrier (that's the "or" below),
            // then the barrier access is unsafe (R/W after R)
            return (src_exec_scope & (stage | barriers)) == 0;
        }

        bool operator!=(const ReadState &rhs) const { return !(*this == rhs); }
        inline void Set(VkPipelineStageFlagBits stage_, SyncStageAccessFlags access_, VkPipelineStageFlags barriers_,
                        const ResourceUsageTag &tag_) {
            stage = stage_;
            access = access_;
            barriers = barriers_;
            tag = tag_;
            pending_dep_chain = 0;  // If this is a new read, we aren't applying a barrier set.
        }
    };

  public:
    HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, const SyncOrdering &ordering_rule) const;

    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags source_exec_scope,
                                     const SyncStageAccessFlags &source_access_scope) const;
    HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index, const ResourceUsageTag &start_tag) const;
    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags source_exec_scope,
                                     const SyncStageAccessFlags &source_access_scope, const ResourceUsageTag &event_tag) const;

    void Update(SyncStageAccessIndex usage_index, SyncOrdering ordering_rule, const ResourceUsageTag &tag);
    void SetWrite(const SyncStageAccessFlags &usage_bit, const ResourceUsageTag &tag);
    void Resolve(const ResourceAccessState &other);
    void ApplyBarriers(const std::vector<SyncBarrier> &barriers, bool layout_transition);
    void ApplyBarriers(const std::vector<SyncBarrier> &barriers, const ResourceUsageTag &tag);
    void ApplyBarrier(const SyncBarrier &barrier, bool layout_transition);
    void ApplyBarrier(const ResourceUsageTag &scope_tag, const SyncBarrier &barrier, bool layout_transition);
    void ApplyPendingBarriers(const ResourceUsageTag &tag);

    ResourceAccessState()
        : write_barriers(~SyncStageAccessFlags(0)),
          write_dependency_chain(0),
          write_tag(),
          last_write(0),
          input_attachment_read(false),
          last_read_stages(0),
          read_execution_barriers(0),
          pending_write_dep_chain(0),
          pending_layout_transition(false),
          pending_write_barriers(0),
          first_accesses_(),
          first_read_stages_(0U) {}

    bool HasPendingState() const {
        return (0 != pending_layout_transition) || pending_write_barriers.any() || (0 != pending_write_dep_chain);
    }
    bool HasWriteOp() const { return last_write != 0; }
    bool operator==(const ResourceAccessState &rhs) const {
        bool same = (write_barriers == rhs.write_barriers) && (write_dependency_chain == rhs.write_dependency_chain) &&
                    (last_reads == rhs.last_reads) && (last_read_stages == rhs.last_read_stages) && (write_tag == rhs.write_tag) &&
                    (input_attachment_read == rhs.input_attachment_read) &&
                    (read_execution_barriers == rhs.read_execution_barriers) && (first_accesses_ == rhs.first_accesses_);
        return same;
    }
    bool operator!=(const ResourceAccessState &rhs) const { return !(*this == rhs); }
    VkPipelineStageFlags GetReadBarriers(const SyncStageAccessFlags &usage) const;
    SyncStageAccessFlags GetWriteBarriers() const { return write_barriers; }
    bool InSourceScopeOrChain(VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope) const {
        return ReadInSourceScopeOrChain(src_exec_scope) || WriteInSourceScopeOrChain(src_exec_scope, src_access_scope);
    }

  private:
    static constexpr VkPipelineStageFlags kInvalidAttachmentStage = ~VkPipelineStageFlags(0);
    bool IsWriteHazard(SyncStageAccessFlags usage) const { return (usage & ~write_barriers).any(); }
    bool IsRAWHazard(VkPipelineStageFlagBits usage_stage, const SyncStageAccessFlags &usage) const;
    bool IsWriteBarrierHazard(VkPipelineStageFlags src_exec_scope, const SyncStageAccessFlags &src_access_scope) const {
        // If the previous write is *not* in the 1st access scope
        // *AND* the current barrier is not in the dependency chain
        // *AND* there is no prior memory barrier for the previous write in the dependency chain,
        // then the barrier access is unsafe (R/W after W)
        return ((last_write & src_access_scope) == 0) && ((src_exec_scope & write_dependency_chain) == 0) && (write_barriers == 0);
    }
    bool ReadInSourceScopeOrChain(VkPipelineStageFlags src_exec_scope) const {
        return (0 != (src_exec_scope & (last_read_stages | read_execution_barriers)));
    }
    bool WriteInSourceScopeOrChain(VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope) const {
        return (src_access_scope & last_write).any() || (write_dependency_chain & src_exec_scope);
    }

    static bool IsReadHazard(VkPipelineStageFlagBits stage, const VkPipelineStageFlags barriers) {
        return 0 != (stage & ~barriers);
    }
    static bool IsReadHazard(VkPipelineStageFlags stage_mask, const VkPipelineStageFlags barriers) {
        return stage_mask != (stage_mask & barriers);
    }

    bool IsReadHazard(VkPipelineStageFlagBits stage, const ReadState &read_access) const {
        return IsReadHazard(stage, read_access.barriers);
    }
    bool IsReadHazard(VkPipelineStageFlags stage_mask, const ReadState &read_access) const {
        return IsReadHazard(stage_mask, read_access.barriers);
    }
    VkPipelineStageFlags GetOrderedStages(const OrderingBarrier &ordering) const;

    void UpdateFirst(const ResourceUsageTag &tag, SyncStageAccessIndex usage_index, SyncOrdering ordering_rule);

    static const OrderingBarrier &GetOrderingRules(SyncOrdering ordering_enum) {
        return kOrderingRules[static_cast<size_t>(ordering_enum)];
    }

    // TODO: Add a NONE (zero) enum to SyncStageAccessFlags for input_attachment_read and last_write

    // With reads, each must be "safe" relative to its prior write, so we need only
    // save the most recent write operation (as anything *transitively* unsafe would already
    // be included).
    SyncStageAccessFlags write_barriers;          // union of applicable barrier masks since last write
    VkPipelineStageFlags write_dependency_chain;  // initially zero, but accumulating the dstStages of barriers if they chain
    ResourceUsageTag write_tag;
    SyncStageAccessFlags last_write;  // only the most recent write

    // TODO Input Attachment cleanup for multiple reads in a given stage
    // Tracks whether the fragment shader read is an input attachment read
    bool input_attachment_read;

    VkPipelineStageFlags last_read_stages;
    VkPipelineStageFlags read_execution_barriers;
    small_vector<ReadState, 3> last_reads;

    // Pending execution state to support independent parallel barriers
    VkPipelineStageFlags pending_write_dep_chain;
    bool pending_layout_transition;
    SyncStageAccessFlags pending_write_barriers;
    FirstAccesses first_accesses_;
    VkPipelineStageFlags first_read_stages_;

    static OrderingBarriers kOrderingRules;
};
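
// Illustrative sketch (hypothetical helper, not part of the validator): hazard detection against a
// single ResourceAccessState is a read-only query; recording the access via Update() is a separate
// step, performed only once validation has decided the command will actually be recorded.
inline bool ExampleAccessIsHazardFree(const ResourceAccessState &state, SyncStageAccessIndex usage) {
    return state.DetectHazard(usage).hazard == NONE;
}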

using ResourceAccessRangeMap = sparse_container::range_map<VkDeviceSize, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;
using ResourceRangeMergeIterator = sparse_container::parallel_iterator<ResourceAccessRangeMap, const ResourceAccessRangeMap>;

using SyncMemoryBarrier = SyncBarrier;
struct SyncBufferMemoryBarrier {
    using Buffer = std::shared_ptr<const BUFFER_STATE>;
    Buffer buffer;
    SyncBarrier barrier;
    ResourceAccessRange range;
    bool IsLayoutTransition() const { return false; }
    const ResourceAccessRange &Range() const { return range; };
    const BUFFER_STATE *GetState() const { return buffer.get(); }
    SyncBufferMemoryBarrier(const Buffer &buffer_, const SyncBarrier &barrier_, const ResourceAccessRange &range_)
        : buffer(buffer_), barrier(barrier_), range(range_) {}
    SyncBufferMemoryBarrier() = default;
};

struct SyncImageMemoryBarrier {
    using Image = std::shared_ptr<const IMAGE_STATE>;
    struct SubImageRange {
        VkImageSubresourceRange subresource_range;
        VkOffset3D offset;
        VkExtent3D extent;
    };
    Image image;
    uint32_t index;
    SyncBarrier barrier;
    VkImageLayout old_layout;
    VkImageLayout new_layout;
    SubImageRange range;

    bool IsLayoutTransition() const { return old_layout != new_layout; }
    const SubImageRange &Range() const { return range; };
    const IMAGE_STATE *GetState() const { return image.get(); }
    SyncImageMemoryBarrier(const Image &image_, uint32_t index_, const SyncBarrier &barrier_, VkImageLayout old_layout_,
                           VkImageLayout new_layout_, const VkImageSubresourceRange &subresource_range_)
        : image(image_),
          index(index_),
          barrier(barrier_),
          old_layout(old_layout_),
          new_layout(new_layout_),
          range({subresource_range_, VkOffset3D{0, 0, 0}, image->createInfo.extent}) {}
    SyncImageMemoryBarrier() = default;
};

class SyncOpBase {
  public:
    SyncOpBase() : cmd_(CMD_NONE) {}
    SyncOpBase(CMD_TYPE cmd) : cmd_(cmd) {}
    const char *CmdName() const { return CommandTypeString(cmd_); }
    virtual bool Validate(const CommandBufferAccessContext &cb_context) const = 0;
    virtual void Record(CommandBufferAccessContext *cb_context) const = 0;

  protected:
    CMD_TYPE cmd_;
};

class SyncOpBarriers : public SyncOpBase {
  protected:
    template <typename Barriers, typename FunctorFactory>
    static void ApplyBarriers(const Barriers &barriers, const FunctorFactory &factory, const ResourceUsageTag &tag,
                              AccessContext *context);
    template <typename Barriers, typename FunctorFactory>
    static void ApplyGlobalBarriers(const Barriers &barriers, const FunctorFactory &factory, const ResourceUsageTag &tag,
                                    AccessContext *access_context);

    SyncOpBarriers(CMD_TYPE cmd, const SyncValidator &sync_state, VkQueueFlags queue_flags, VkPipelineStageFlags srcStageMask,
                   VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
                   const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                   const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                   const VkImageMemoryBarrier *pImageMemoryBarriers);

  protected:
    void MakeMemoryBarriers(const SyncExecScope &src, const SyncExecScope &dst, VkDependencyFlags dependencyFlags,
                            uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers);
    void MakeBufferMemoryBarriers(const SyncValidator &sync_state, const SyncExecScope &src, const SyncExecScope &dst,
                                  VkDependencyFlags dependencyFlags, uint32_t bufferMemoryBarrierCount,
                                  const VkBufferMemoryBarrier *pBufferMemoryBarriers);
    void MakeImageMemoryBarriers(const SyncValidator &sync_state, const SyncExecScope &src, const SyncExecScope &dst,
                                 VkDependencyFlags dependencyFlags, uint32_t imageMemoryBarrierCount,
                                 const VkImageMemoryBarrier *pImageMemoryBarriers);

    VkDependencyFlags dependency_flags_;
    SyncExecScope src_exec_scope_;
    SyncExecScope dst_exec_scope_;
    std::vector<SyncMemoryBarrier> memory_barriers_;
    std::vector<SyncBufferMemoryBarrier> buffer_memory_barriers_;
    std::vector<SyncImageMemoryBarrier> image_memory_barriers_;
};

class SyncOpPipelineBarrier : public SyncOpBarriers {
  public:
    SyncOpPipelineBarrier(CMD_TYPE cmd, const SyncValidator &sync_state, VkQueueFlags queue_flags,
                          VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                          uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                          const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                          const VkImageMemoryBarrier *pImageMemoryBarriers);
    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    void Record(CommandBufferAccessContext *cb_context) const override;
};
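
// Illustrative sketch (hypothetical wrapper, not part of the validator): each SyncOp captures a
// command's parameters once. In the validator, Validate() is driven from the PreCallValidate...
// entry point against the current CommandBufferAccessContext, and Record() from the
// PreCallRecord... entry point; they are shown back to back here only for illustration, assuming
// Validate() follows the usual validation-layer convention of returning "skip".
inline void ExampleValidateThenRecord(const SyncOpBase &op, CommandBufferAccessContext *cb_context) {
    const bool skip = op.Validate(*cb_context);
    if (!skip) {
        op.Record(cb_context);
    }
}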
520
521class SyncOpWaitEvents : public SyncOpBarriers {
522 public:
John Zulauf36ef9282021-02-02 11:47:24 -0700523 SyncOpWaitEvents(CMD_TYPE cmd, const SyncValidator &sync_state, VkQueueFlags queue_flags, uint32_t eventCount,
524 const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
525 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
John Zulaufd5115702021-01-18 12:34:33 -0700526 const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
527 const VkImageMemoryBarrier *pImageMemoryBarriers);
528 bool Validate(const CommandBufferAccessContext &cb_context) const override;
John Zulauf36ef9282021-02-02 11:47:24 -0700529 void Record(CommandBufferAccessContext *cb_context) const override;
John Zulaufd5115702021-01-18 12:34:33 -0700530
531 protected:
532 // TODO PHASE2 This is the wrong thing to use for "replay".. as the event state will have moved on since the record
533 // TODO PHASE2 May need to capture by value w.r.t. "first use" or build up in calling/enqueue context through replay.
John Zulauf669dfd52021-01-27 17:15:28 -0700534 std::vector<std::shared_ptr<const EVENT_STATE>> events_;
535 void MakeEventsList(const SyncValidator &sync_state, uint32_t event_count, const VkEvent *events);
John Zulaufd5115702021-01-18 12:34:33 -0700536};
537
John Zulauf6ce24372021-01-30 05:56:25 -0700538class SyncOpResetEvent : public SyncOpBase {
539 public:
John Zulauf36ef9282021-02-02 11:47:24 -0700540 SyncOpResetEvent(CMD_TYPE cmd, const SyncValidator &sync_state, VkQueueFlags queue_flags, VkEvent event,
541 VkPipelineStageFlags stageMask);
John Zulauf6ce24372021-01-30 05:56:25 -0700542 bool Validate(const CommandBufferAccessContext &cb_context) const override;
John Zulauf36ef9282021-02-02 11:47:24 -0700543 void Record(CommandBufferAccessContext *cb_context) const override;
John Zulauf6ce24372021-01-30 05:56:25 -0700544
545 private:
546 std::shared_ptr<const EVENT_STATE> event_;
547 SyncExecScope exec_scope_;
548};
549
550class SyncOpSetEvent : public SyncOpBase {
551 public:
John Zulauf36ef9282021-02-02 11:47:24 -0700552 SyncOpSetEvent(CMD_TYPE cmd, const SyncValidator &sync_state, VkQueueFlags queue_flags, VkEvent event,
553 VkPipelineStageFlags stageMask);
John Zulauf6ce24372021-01-30 05:56:25 -0700554 bool Validate(const CommandBufferAccessContext &cb_context) const override;
John Zulauf36ef9282021-02-02 11:47:24 -0700555 void Record(CommandBufferAccessContext *cb_context) const override;
John Zulauf6ce24372021-01-30 05:56:25 -0700556
557 private:
558 std::shared_ptr<const EVENT_STATE> event_;
559 SyncExecScope src_exec_scope_;
560};
John Zulauf540266b2020-04-06 18:54:53 -0600561class AccessContext {
John Zulauf5c5e88d2019-12-26 11:22:02 -0700562 public:
John Zulauf69133422020-05-20 14:55:53 -0600563 enum DetectOptions : uint32_t {
John Zulauf355e49b2020-04-24 15:11:15 -0600564 kDetectPrevious = 1U << 0,
565 kDetectAsync = 1U << 1,
566 kDetectAll = (kDetectPrevious | kDetectAsync)
John Zulauf16adfc92020-04-08 10:28:33 -0600567 };
John Zulauf43cc7462020-12-03 12:33:12 -0700568 using MapArray = std::array<ResourceAccessRangeMap, static_cast<size_t>(AccessAddressType::kTypeCount)>;
John Zulauf16adfc92020-04-08 10:28:33 -0600569
John Zulaufbaea94f2020-09-15 17:55:16 -0600570 // WIP TODO WIP Multi-dep -- change track back to support barrier vector, not just last.
John Zulauf3d84f1b2020-03-09 13:33:25 -0600571 struct TrackBack {
John Zulaufa0a98292020-09-18 09:30:10 -0600572 std::vector<SyncBarrier> barriers;
John Zulauf1a224292020-06-30 14:52:13 -0600573 const AccessContext *context;
John Zulaufbaea94f2020-09-15 17:55:16 -0600574 TrackBack(const AccessContext *context_, VkQueueFlags queue_flags_,
John Zulaufa0a98292020-09-18 09:30:10 -0600575 const std::vector<const VkSubpassDependency2 *> &subpass_dependencies_)
576 : barriers(), context(context_) {
577 barriers.reserve(subpass_dependencies_.size());
578 for (const VkSubpassDependency2 *dependency : subpass_dependencies_) {
579 assert(dependency);
580 barriers.emplace_back(queue_flags_, *dependency);
581 }
582 }
583
John Zulauf3d84f1b2020-03-09 13:33:25 -0600584 TrackBack &operator=(const TrackBack &) = default;
585 TrackBack() = default;
586 };
John Zulauf5c5e88d2019-12-26 11:22:02 -0700587
John Zulauf355e49b2020-04-24 15:11:15 -0600588 HazardResult DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
John Zulauf540266b2020-04-06 18:54:53 -0600589 HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
John Zulauf3d84f1b2020-03-09 13:33:25 -0600590 const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
591 const VkExtent3D &extent) const;
John Zulauf69133422020-05-20 14:55:53 -0600592 template <typename Detector>
593 HazardResult DetectHazard(Detector &detector, const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
594 const VkOffset3D &offset, const VkExtent3D &extent, DetectOptions options) const;
John Zulauf1507ee42020-05-18 11:33:09 -0600595 HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
596 const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
597 const VkExtent3D &extent) const;
John Zulauf69133422020-05-20 14:55:53 -0600598 HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
John Zulauf8e3c3e92021-01-06 11:19:36 -0700599 const VkImageSubresourceRange &subresource_range, SyncOrdering ordering_rule,
John Zulauf69133422020-05-20 14:55:53 -0600600 const VkOffset3D &offset, const VkExtent3D &extent) const;
John Zulauf8e3c3e92021-01-06 11:19:36 -0700601 HazardResult DetectHazard(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
John Zulaufb027cdb2020-05-21 14:25:22 -0600602 const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask = 0U) const;
John Zulauf355e49b2020-04-24 15:11:15 -0600603 HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
Jeremy Gebbend0de1f82020-11-09 08:21:07 -0700604 const SyncStageAccessFlags &src_access_scope,
John Zulauf4a6105a2020-11-17 15:11:05 -0700605 const VkImageSubresourceRange &subresource_range, const SyncEventState &sync_event,
606 DetectOptions options) const;
607 HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
608 const SyncStageAccessFlags &src_access_scope,
Jeremy Gebbend0de1f82020-11-09 08:21:07 -0700609 const VkImageSubresourceRange &subresource_range, DetectOptions options) const;
John Zulauf355e49b2020-04-24 15:11:15 -0600610 HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
Jeremy Gebbend0de1f82020-11-09 08:21:07 -0700611 const SyncStageAccessFlags &src_stage_accesses,
612 const VkImageMemoryBarrier &barrier) const;
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700613 HazardResult DetectImageBarrierHazard(const SyncImageMemoryBarrier &image_barrier) const;
John Zulauf7635de32020-05-29 17:14:15 -0600614 HazardResult DetectSubpassTransitionHazard(const TrackBack &track_back, const IMAGE_VIEW_STATE *attach_view) const;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600615
John Zulaufb02c1eb2020-10-06 16:33:36 -0600616 void RecordLayoutTransitions(const RENDER_PASS_STATE &rp_state, uint32_t subpass,
617 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const ResourceUsageTag &tag);
618
John Zulaufe5da6e52020-03-18 15:32:18 -0600619 const TrackBack &GetDstExternalTrackBack() const { return dst_external_; }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600620 void Reset() {
John Zulauf3d84f1b2020-03-09 13:33:25 -0600621 prev_.clear();
John Zulauf355e49b2020-04-24 15:11:15 -0600622 prev_by_subpass_.clear();
John Zulauf3d84f1b2020-03-09 13:33:25 -0600623 async_.clear();
John Zulaufe5da6e52020-03-18 15:32:18 -0600624 src_external_ = TrackBack();
John Zulaufa0a98292020-09-18 09:30:10 -0600625 dst_external_ = TrackBack();
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -0700626 start_tag_ = ResourceUsageTag();
John Zulauf16adfc92020-04-08 10:28:33 -0600627 for (auto &map : access_state_maps_) {
628 map.clear();
629 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600630 }
John Zulaufb02c1eb2020-10-06 16:33:36 -0600631
    // Follow the previous context to access the access state, supporting "lazy" import into this context. Not intended for
    // subpass layout transitions, as the pending state handling there is more complex.
    // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead
    // Would need to add a "hint" overload to parallel_iterator::invalidate_[AB] call, if so.
    void ResolvePreviousAccess(AccessAddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state) const;
    void ResolvePreviousAccesses();
    template <typename BarrierAction>
    void ResolveAccessRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range,
                            BarrierAction &barrier_action, AccessAddressType address_type, ResourceAccessRangeMap *descent_map,
                            const ResourceAccessState *infill_state) const;
    template <typename BarrierAction>
    void ResolveAccessRange(AccessAddressType type, const ResourceAccessRange &range, BarrierAction &barrier_action,
                            ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
                            bool recur_to_infill = true) const;

    void UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
                           const ResourceAccessRange &range, const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
                           const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
                           const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    void UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                       const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                       const ResourceUsageTag &tag);
    void UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                     const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                     const ResourceUsageTag &tag);

    void ResolveChildContexts(const std::vector<AccessContext> &contexts);

    template <typename Action>
    void UpdateResourceAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action);
    template <typename Action>
    void UpdateResourceAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range, const Action action);

    template <typename Action>
    void ApplyToContext(const Action &barrier_action);
    static AccessAddressType ImageAddressType(const IMAGE_STATE &image);

    AccessContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
                  const std::vector<AccessContext> &contexts, const AccessContext *external_context);

    AccessContext() { Reset(); }
    AccessContext(const AccessContext &copy_from) = default;

    ResourceAccessRangeMap &GetAccessStateMap(AccessAddressType type) { return access_state_maps_[static_cast<size_t>(type)]; }
    const ResourceAccessRangeMap &GetAccessStateMap(AccessAddressType type) const {
        return access_state_maps_[static_cast<size_t>(type)];
    }
    ResourceAccessRangeMap &GetLinearMap() { return GetAccessStateMap(AccessAddressType::kLinear); }
    const ResourceAccessRangeMap &GetLinearMap() const { return GetAccessStateMap(AccessAddressType::kLinear); }
    ResourceAccessRangeMap &GetIdealizedMap() { return GetAccessStateMap(AccessAddressType::kIdealized); }
    const ResourceAccessRangeMap &GetIdealizedMap() const { return GetAccessStateMap(AccessAddressType::kIdealized); }
    const TrackBack *GetTrackBackFromSubpass(uint32_t subpass) const {
        if (subpass == VK_SUBPASS_EXTERNAL) {
            return &src_external_;
        } else {
            assert(subpass < prev_by_subpass_.size());
            return prev_by_subpass_[subpass];
        }
    }

    bool ValidateLayoutTransitions(const CommandBufferAccessContext &cb_context, const RENDER_PASS_STATE &rp_state,
                                   const VkRect2D &render_area, uint32_t subpass,
                                   const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name) const;
    bool ValidateLoadOperation(const CommandBufferAccessContext &cb_context, const RENDER_PASS_STATE &rp_state,
                               const VkRect2D &render_area, uint32_t subpass,
                               const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name) const;
    bool ValidateStoreOperation(const CommandBufferAccessContext &cb_context, const RENDER_PASS_STATE &rp_state,
                                const VkRect2D &render_area, uint32_t subpass,
                                const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name) const;
    bool ValidateResolveOperations(const CommandBufferAccessContext &cb_context, const RENDER_PASS_STATE &rp_state,
                                   const VkRect2D &render_area, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                   const char *func_name, uint32_t subpass) const;

    void SetStartTag(const ResourceUsageTag &tag) { start_tag_ = tag; }
    template <typename Action>
    void ForAll(Action &&action);

  private:
    template <typename Detector>
    HazardResult DetectHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range,
                              DetectOptions options) const;
    template <typename Detector>
    HazardResult DetectAsyncHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectPreviousHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    void UpdateAccessState(AccessAddressType type, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
                           const ResourceAccessRange &range, const ResourceUsageTag &tag);

    MapArray access_state_maps_;
    std::vector<TrackBack> prev_;
    std::vector<TrackBack *> prev_by_subpass_;
    std::vector<const AccessContext *> async_;
    TrackBack src_external_;
    TrackBack dst_external_;
    ResourceUsageTag start_tag_;
};
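
// Illustrative sketch (hypothetical helper, not part of the validator): the typical
// detect-then-record pattern a command implementation follows against an AccessContext,
// assuming a buffer access over [offset, offset + size).
inline bool ExampleValidateThenRecordBufferAccess(AccessContext *context, const BUFFER_STATE &buffer,
                                                  SyncStageAccessIndex usage, VkDeviceSize offset, VkDeviceSize size,
                                                  const ResourceUsageTag &tag) {
    const ResourceAccessRange range(offset, offset + size);
    // Validate phase: look for a hazard against prior accesses (including previous contexts).
    const HazardResult hazard = context->DetectHazard(buffer, usage, range);
    if (hazard.hazard != NONE) return false;  // the caller would log a SYNC-HAZARD error here
    // Record phase: update the access state so later commands are checked against this use.
    context->UpdateAccessState(buffer, usage, SyncOrdering::kNonAttachment, range, tag);
    return true;
}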
736
John Zulauf355e49b2020-04-24 15:11:15 -0600737class RenderPassAccessContext {
738 public:
John Zulauf1a224292020-06-30 14:52:13 -0600739 RenderPassAccessContext() : rp_state_(nullptr), current_subpass_(0) {}
John Zulauf355e49b2020-04-24 15:11:15 -0600740
John Zulauffaea0ee2021-01-14 14:01:32 -0700741 bool ValidateDrawSubpassAttachment(const CommandBufferAccessContext &cb_context, const CMD_BUFFER_STATE &cmd,
742 const VkRect2D &render_area, const char *func_name) const;
locke-lunarg96dc9632020-06-10 17:22:18 -0600743 void RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area, const ResourceUsageTag &tag);
John Zulauffaea0ee2021-01-14 14:01:32 -0700744 bool ValidateNextSubpass(const CommandBufferAccessContext &cb_context, const VkRect2D &render_area,
745 const char *command_name) const;
746 bool ValidateEndRenderPass(const CommandBufferAccessContext &cb_context, const VkRect2D &render_area,
747 const char *func_name) const;
748 bool ValidateFinalSubpassLayoutTransitions(const CommandBufferAccessContext &cb_context, const VkRect2D &render_area,
John Zulauf7635de32020-05-29 17:14:15 -0600749 const char *func_name) const;
John Zulauf355e49b2020-04-24 15:11:15 -0600750
751 void RecordLayoutTransitions(const ResourceUsageTag &tag);
John Zulauf1507ee42020-05-18 11:33:09 -0600752 void RecordLoadOperations(const VkRect2D &render_area, const ResourceUsageTag &tag);
John Zulauf1a224292020-06-30 14:52:13 -0600753 void RecordBeginRenderPass(const SyncValidator &state, const CMD_BUFFER_STATE &cb_state, const AccessContext *external_context,
754 VkQueueFlags queue_flags, const ResourceUsageTag &tag);
John Zulauffaea0ee2021-01-14 14:01:32 -0700755 void RecordNextSubpass(const VkRect2D &render_area, const ResourceUsageTag &prev_subpass_tag,
756 const ResourceUsageTag &next_subpass_tag);
John Zulauf1a224292020-06-30 14:52:13 -0600757 void RecordEndRenderPass(AccessContext *external_context, const VkRect2D &render_area, const ResourceUsageTag &tag);
John Zulauf355e49b2020-04-24 15:11:15 -0600758
John Zulauf540266b2020-04-06 18:54:53 -0600759 AccessContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
760 const AccessContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
John Zulauf355e49b2020-04-24 15:11:15 -0600761 const std::vector<AccessContext> &GetContexts() const { return subpass_contexts_; }
762 uint32_t GetCurrentSubpass() const { return current_subpass_; }
763 const RENDER_PASS_STATE *GetRenderPassState() const { return rp_state_; }
John Zulauf7635de32020-05-29 17:14:15 -0600764 AccessContext *CreateStoreResolveProxy(const VkRect2D &render_area) const;
John Zulauf355e49b2020-04-24 15:11:15 -0600765
766 private:
John Zulauf355e49b2020-04-24 15:11:15 -0600767 const RENDER_PASS_STATE *rp_state_;
768 uint32_t current_subpass_;
769 std::vector<AccessContext> subpass_contexts_;
770 std::vector<const IMAGE_VIEW_STATE *> attachment_views_;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600771};
772
773class CommandBufferAccessContext {
774 public:
775 CommandBufferAccessContext()
John Zulauffaea0ee2021-01-14 14:01:32 -0700776 : access_index_(0),
777 command_number_(0),
778 subcommand_number_(0),
John Zulauf355e49b2020-04-24 15:11:15 -0600779 reset_count_(0),
780 render_pass_contexts_(),
781 cb_access_context_(),
782 current_context_(&cb_access_context_),
John Zulauf3d84f1b2020-03-09 13:33:25 -0600783 current_renderpass_context_(),
784 cb_state_(),
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700785 queue_flags_(),
John Zulauf669dfd52021-01-27 17:15:28 -0700786 events_context_(),
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700787 destroyed_(false) {}
John Zulauf355e49b2020-04-24 15:11:15 -0600788 CommandBufferAccessContext(SyncValidator &sync_validator, std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
John Zulauf3d84f1b2020-03-09 13:33:25 -0600789 : CommandBufferAccessContext() {
790 cb_state_ = cb_state;
John Zulauf355e49b2020-04-24 15:11:15 -0600791 sync_state_ = &sync_validator;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600792 queue_flags_ = queue_flags;
793 }
John Zulauf5c5e88d2019-12-26 11:22:02 -0700794
795 void Reset() {
John Zulauffaea0ee2021-01-14 14:01:32 -0700796 access_index_ = 0;
John Zulauf355e49b2020-04-24 15:11:15 -0600797 command_number_ = 0;
John Zulauffaea0ee2021-01-14 14:01:32 -0700798 subcommand_number_ = 0;
John Zulauf355e49b2020-04-24 15:11:15 -0600799 reset_count_++;
800 cb_access_context_.Reset();
John Zulauf3d84f1b2020-03-09 13:33:25 -0600801 render_pass_contexts_.clear();
John Zulauf355e49b2020-04-24 15:11:15 -0600802 current_context_ = &cb_access_context_;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600803 current_renderpass_context_ = nullptr;
John Zulauf669dfd52021-01-27 17:15:28 -0700804 events_context_.Clear();
John Zulauf5c5e88d2019-12-26 11:22:02 -0700805 }
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700806 void MarkDestroyed() { destroyed_ = true; }
807 bool IsDestroyed() const { return destroyed_; }
John Zulauf5c5e88d2019-12-26 11:22:02 -0700808
John Zulauffaea0ee2021-01-14 14:01:32 -0700809 std::string FormatUsage(const HazardResult &hazard) const;
John Zulauf540266b2020-04-06 18:54:53 -0600810 AccessContext *GetCurrentAccessContext() { return current_context_; }
John Zulauf669dfd52021-01-27 17:15:28 -0700811 SyncEventsContext *GetCurrentEventsContext() { return &events_context_; }
812 const SyncEventsContext *GetCurrentEventsContext() const { return &events_context_; }
John Zulauf540266b2020-04-06 18:54:53 -0600813 const AccessContext *GetCurrentAccessContext() const { return current_context_; }
John Zulauf355e49b2020-04-24 15:11:15 -0600814 void RecordBeginRenderPass(const ResourceUsageTag &tag);
Jeremy Gebben9893daf2021-01-04 10:40:50 -0700815 void ApplyGlobalBarriersToEvents(const SyncExecScope &src, const SyncExecScope &dst);
John Zulaufd5115702021-01-18 12:34:33 -0700816
John Zulauf355e49b2020-04-24 15:11:15 -0600817 bool ValidateBeginRenderPass(const RENDER_PASS_STATE &render_pass, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -0800818 const VkSubpassBeginInfo *pSubpassBeginInfo, const char *func_name) const;
locke-lunarg61870c22020-06-09 14:51:50 -0600819 bool ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const char *func_name) const;
820 void RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const ResourceUsageTag &tag);
821 bool ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const char *func_name) const;
822 void RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const ResourceUsageTag &tag);
823 bool ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const char *func_name) const;
824 void RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const ResourceUsageTag &tag);
825 bool ValidateDrawSubpassAttachment(const char *func_name) const;
826 void RecordDrawSubpassAttachment(const ResourceUsageTag &tag);
John Zulauf355e49b2020-04-24 15:11:15 -0600827 bool ValidateNextSubpass(const char *func_name) const;
828 bool ValidateEndRenderpass(const char *func_name) const;
John Zulauffaea0ee2021-01-14 14:01:32 -0700829 void RecordNextSubpass(const RENDER_PASS_STATE &render_pass, CMD_TYPE command);
830 void RecordEndRenderPass(const RENDER_PASS_STATE &render_pass, CMD_TYPE command);
John Zulauf4a6105a2020-11-17 15:11:05 -0700831 void RecordDestroyEvent(VkEvent event);
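    // The Validate*/Record* pairs above follow the layer's two-phase pattern: the const Validate* members
    // only report hazards (taking the calling entry point's name for the message), while the Record*
    // members update the tracked access state under a ResourceUsageTag. A minimal draw-time sketch
    // (hypothetical wiring; CMD_DRAW is assumed to be the matching CMD_TYPE enumerator):
    //
    //     skip |= cb_context.ValidateDrawVertex(vertexCount, firstVertex, "vkCmdDraw");
    //     // ...later, at record time...
    //     cb_context.RecordDrawVertex(vertexCount, firstVertex, cb_context.NextCommandTag(CMD_DRAW));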
John Zulauf49beb112020-11-04 16:06:31 -0700832
John Zulauf3d84f1b2020-03-09 13:33:25 -0600833 CMD_BUFFER_STATE *GetCommandBufferState() { return cb_state_.get(); }
834 const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
835 VkQueueFlags GetQueueFlags() const { return queue_flags_; }
John Zulauffaea0ee2021-01-14 14:01:32 -0700836 inline ResourceUsageTag::TagIndex NextAccessIndex() { return access_index_++; }
837
838 inline ResourceUsageTag NextSubcommandTag(CMD_TYPE command) {
839 ResourceUsageTag next(NextAccessIndex(), command_number_, subcommand_number_++, command);
840 return next;
841 }
842
John Zulauf355e49b2020-04-24 15:11:15 -0600843 inline ResourceUsageTag NextCommandTag(CMD_TYPE command) {
John Zulauf355e49b2020-04-24 15:11:15 -0600844 command_number_++;
John Zulauffaea0ee2021-01-14 14:01:32 -0700845 subcommand_number_ = 0;
Jeremy Gebben6ea9d9e2020-12-11 09:41:01 -0700846         // The sub-command number is used to separate operations at the end of the previous subpass
                                                 847         // from those at the start of the next one in vkCmdNextSubpass().
John Zulauffaea0ee2021-01-14 14:01:32 -0700848 ResourceUsageTag next(NextAccessIndex(), command_number_, subcommand_number_, command);
John Zulauf355e49b2020-04-24 15:11:15 -0600849 return next;
850 }
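    // Tag generation: NextCommandTag() starts a new command (command_number_ advances and the sub-command
    // counter rewinds to zero), while NextSubcommandTag() labels additional work recorded on behalf of the
    // same command; every tag also consumes a fresh index from NextAccessIndex(), which is what keeps tags
    // unique. Illustrative sequence for a next-subpass style recording (CMD_NEXTSUBPASS is assumed to be
    // the matching CMD_TYPE enumerator):
    //
    //     auto cmd_tag = NextCommandTag(CMD_NEXTSUBPASS);    // new command_number_, subcommand counter reset
    //     auto sub_tag = NextSubcommandTag(CMD_NEXTSUBPASS); // same command_number_, subcommand counter advances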
John Zulauf3d84f1b2020-03-09 13:33:25 -0600851
John Zulauffaea0ee2021-01-14 14:01:32 -0700852 const CMD_BUFFER_STATE &GetCBState() const {
853 assert(cb_state_);
854 return *(cb_state_.get());
855 }
856 CMD_BUFFER_STATE &GetCBState() {
857 assert(cb_state_);
858 return *(cb_state_.get());
859 }
860 const SyncValidator &GetSyncState() const {
861 assert(sync_state_);
862 return *sync_state_;
863 }
864 SyncValidator &GetSyncState() {
865 assert(sync_state_);
866 return *sync_state_;
867 }
868
John Zulauf3d84f1b2020-03-09 13:33:25 -0600869 private:
John Zulauffaea0ee2021-01-14 14:01:32 -0700870 ResourceUsageTag::TagIndex access_index_;
John Zulauf355e49b2020-04-24 15:11:15 -0600871 uint32_t command_number_;
John Zulauffaea0ee2021-01-14 14:01:32 -0700872 uint32_t subcommand_number_;
John Zulauf355e49b2020-04-24 15:11:15 -0600873 uint32_t reset_count_;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600874 std::vector<RenderPassAccessContext> render_pass_contexts_;
John Zulauf355e49b2020-04-24 15:11:15 -0600875 AccessContext cb_access_context_;
John Zulauf540266b2020-04-06 18:54:53 -0600876 AccessContext *current_context_;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600877 RenderPassAccessContext *current_renderpass_context_;
878 std::shared_ptr<CMD_BUFFER_STATE> cb_state_;
John Zulauf355e49b2020-04-24 15:11:15 -0600879 SyncValidator *sync_state_;
880
John Zulauf3d84f1b2020-03-09 13:33:25 -0600881 VkQueueFlags queue_flags_;
John Zulauf669dfd52021-01-27 17:15:28 -0700882 SyncEventsContext events_context_;
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700883 bool destroyed_;
John Zulauf9cb530d2019-09-30 14:14:10 -0600884};
885
886class SyncValidator : public ValidationStateTracker, public SyncStageAccess {
887 public:
888 SyncValidator() { container_type = LayerObjectTypeSyncValidation; }
889 using StateTracker = ValidationStateTracker;
890
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700891 std::unordered_map<VkCommandBuffer, CommandBufferAccessContextShared> cb_access_state;
892
893 CommandBufferAccessContextShared GetAccessContextImpl(VkCommandBuffer command_buffer, bool do_insert) {
John Zulauf9cb530d2019-09-30 14:14:10 -0600894 auto found_it = cb_access_state.find(command_buffer);
895 if (found_it == cb_access_state.end()) {
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700896 if (!do_insert) return CommandBufferAccessContextShared();
John Zulauf9cb530d2019-09-30 14:14:10 -0600897 // If we don't have one, make it.
John Zulauf3d84f1b2020-03-09 13:33:25 -0600898 auto cb_state = GetShared<CMD_BUFFER_STATE>(command_buffer);
899 assert(cb_state.get());
900 auto queue_flags = GetQueueFlags(*cb_state);
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700901             auto context = std::make_shared<CommandBufferAccessContext>(*this, cb_state, queue_flags);
John Zulauf3d84f1b2020-03-09 13:33:25 -0600902 auto insert_pair = cb_access_state.insert(std::make_pair(command_buffer, std::move(context)));
John Zulauf9cb530d2019-09-30 14:14:10 -0600903 found_it = insert_pair.first;
904 }
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700905 return found_it->second;
John Zulauf9cb530d2019-09-30 14:14:10 -0600906 }
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700907
John Zulauf3d84f1b2020-03-09 13:33:25 -0600908 CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) {
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700909 return GetAccessContextImpl(command_buffer, true).get(); // true -> do_insert on not found
John Zulauf9cb530d2019-09-30 14:14:10 -0600910 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600911 CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer) {
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700912 return GetAccessContextImpl(command_buffer, false).get(); // false -> don't do_insert on not found
913 }
914
915 CommandBufferAccessContextShared GetAccessContextShared(VkCommandBuffer command_buffer) {
916 return GetAccessContextImpl(command_buffer, true); // true -> do_insert on not found
917 }
918 CommandBufferAccessContextShared GetAccessContextSharedNoInsert(VkCommandBuffer command_buffer) {
John Zulauf3d84f1b2020-03-09 13:33:25 -0600919 return GetAccessContextImpl(command_buffer, false); // false -> don't do_insert on not found
John Zulauf9cb530d2019-09-30 14:14:10 -0600920 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600921
922 const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const {
John Zulauf9cb530d2019-09-30 14:14:10 -0600923 const auto found_it = cb_access_state.find(command_buffer);
924 if (found_it == cb_access_state.end()) {
925 return nullptr;
926 }
927 return found_it->second.get();
928 }
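    // Lookup sketch for the accessor family above (illustrative; `commandBuffer` is whatever handle the
    // Pre/PostCall hooks below receive, and the non-const, inserting variant creates the context on first
    // use):
    //
    //     if (const CommandBufferAccessContext *cb_context = GetAccessContext(commandBuffer)) {
    //         const AccessContext *access_context = cb_context->GetCurrentAccessContext();
    //         // ... validate against, or record into, access_context ...
    //     }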
929
John Zulaufd1f85d42020-04-15 12:23:15 -0600930 void ResetCommandBufferCallback(VkCommandBuffer command_buffer);
931 void FreeCommandBufferCallback(VkCommandBuffer command_buffer);
John Zulauf3d84f1b2020-03-09 13:33:25 -0600932 void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
John Zulauf355e49b2020-04-24 15:11:15 -0600933 const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE command);
934 void RecordCmdNextSubpass(VkCommandBuffer commandBuffer,
John Zulauf355e49b2020-04-24 15:11:15 -0600935 const VkSubpassBeginInfo *pSubpassBeginInfo, const VkSubpassEndInfo *pSubpassEndInfo,
936 CMD_TYPE command);
937 void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command);
John Zulauf33fc1d52020-07-17 11:01:10 -0600938 bool SupressedBoundDescriptorWAW(const HazardResult &hazard) const;
John Zulauf9cb530d2019-09-30 14:14:10 -0600939
940 void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600941 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) override;
John Zulauf9cb530d2019-09-30 14:14:10 -0600942
John Zulauf355e49b2020-04-24 15:11:15 -0600943 bool ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -0800944 const VkSubpassBeginInfo *pSubpassBeginInfo, const char *func_name) const;
John Zulauf355e49b2020-04-24 15:11:15 -0600945
946 bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600947 VkSubpassContents contents) const override;
John Zulauf355e49b2020-04-24 15:11:15 -0600948
949 bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -0800950 const VkSubpassBeginInfo *pSubpassBeginInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -0600951
952 bool PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -0800953 const VkSubpassBeginInfo *pSubpassBeginInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -0600954
John Zulauf9cb530d2019-09-30 14:14:10 -0600955 bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600956 const VkBufferCopy *pRegions) const override;
John Zulauf9cb530d2019-09-30 14:14:10 -0600957
958 void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600959 const VkBufferCopy *pRegions) override;
John Zulauf9cb530d2019-09-30 14:14:10 -0600960
John Zulauf4a6105a2020-11-17 15:11:05 -0700961 void PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) override;
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600962 bool PreCallValidateCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -0400963
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600964 void PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) override;
Jeff Leger178b1e52020-10-05 12:22:23 -0400965
John Zulauf5c5e88d2019-12-26 11:22:02 -0700966 bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
967 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600968 const VkImageCopy *pRegions) const override;
John Zulauf5c5e88d2019-12-26 11:22:02 -0700969
970 void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600971 VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions) override;
John Zulauf5c5e88d2019-12-26 11:22:02 -0700972
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600973 bool PreCallValidateCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -0400974
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600975 void PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) override;
Jeff Leger178b1e52020-10-05 12:22:23 -0400976
John Zulauf9cb530d2019-09-30 14:14:10 -0600977 bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
978 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
979 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
980 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
981 uint32_t imageMemoryBarrierCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600982 const VkImageMemoryBarrier *pImageMemoryBarriers) const override;
John Zulauf9cb530d2019-09-30 14:14:10 -0600983
984 void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
985 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
986 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
987 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600988 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600989
990 void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600991 VkResult result) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600992
993 void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600994 VkSubpassContents contents) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600995 void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600996 const VkSubpassBeginInfo *pSubpassBeginInfo) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600997 void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Jeremy Gebbenf8924692020-10-28 16:27:14 -0600998 const VkSubpassBeginInfo *pSubpassBeginInfo) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600999
Mike Schuchardt2df08912020-12-15 16:28:09 -08001000 bool ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
1001 const VkSubpassEndInfo *pSubpassEndInfo, const char *func_name) const;
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001002 bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const override;
Mike Schuchardt2df08912020-12-15 16:28:09 -08001003 bool PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
1004 const VkSubpassEndInfo *pSubpassEndInfo) const override;
1005 bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
1006 const VkSubpassEndInfo *pSubpassEndInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -06001007
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001008 void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001009 void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001010 const VkSubpassEndInfo *pSubpassEndInfo) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001011 void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001012 const VkSubpassEndInfo *pSubpassEndInfo) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001013
Mike Schuchardt2df08912020-12-15 16:28:09 -08001014 bool ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo,
John Zulauf355e49b2020-04-24 15:11:15 -06001015 const char *func_name) const;
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001016 bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const override;
Mike Schuchardt2df08912020-12-15 16:28:09 -08001017 bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const override;
1018 bool PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -06001019
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001020 void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) override;
1021 void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) override;
1022 void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001023
1024 template <typename BufferImageCopyRegionType>
1025 bool ValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
1026 VkImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
1027 CopyCommandVersion version) const;
locke-lunarga19c71d2020-03-02 18:17:04 -07001028 bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
1029 VkImageLayout dstImageLayout, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001030 const VkBufferImageCopy *pRegions) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001031 bool PreCallValidateCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001032 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) const override;
locke-lunarga19c71d2020-03-02 18:17:04 -07001033
Jeff Leger178b1e52020-10-05 12:22:23 -04001034 template <typename BufferImageCopyRegionType>
1035 void RecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
1036 VkImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
1037 CopyCommandVersion version);
locke-lunarga19c71d2020-03-02 18:17:04 -07001038 void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001039 VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions) override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001040 void PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001041 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) override;
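    // The BufferImageCopyRegionType template lets VkBufferImageCopy (vkCmdCopyBufferToImage) and the
    // copy-commands-2 VkBufferImageCopy2KHR regions, whose members share the same names, go through one
    // validate/record path, with the CopyCommandVersion argument presumably telling the shared code which
    // entry point to name in error messages. A plausible dispatch sketch (COPY_COMMAND_VERSION_1 is assumed
    // to be the enumerator for the original entry point):
    //
    //     return ValidateCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout,
    //                                         regionCount, pRegions, COPY_COMMAND_VERSION_1);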
locke-lunarga19c71d2020-03-02 18:17:04 -07001042
Jeff Leger178b1e52020-10-05 12:22:23 -04001043 template <typename BufferImageCopyRegionType>
1044 bool ValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
1045 VkBuffer dstBuffer, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
1046 CopyCommandVersion version) const;
locke-lunarga19c71d2020-03-02 18:17:04 -07001047 bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001048 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001049 bool PreCallValidateCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001050 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) const override;
locke-lunarga19c71d2020-03-02 18:17:04 -07001051
Jeff Leger178b1e52020-10-05 12:22:23 -04001052 template <typename BufferImageCopyRegionType>
1053 void RecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
1054 VkBuffer dstBuffer, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
1055 CopyCommandVersion version);
locke-lunarga19c71d2020-03-02 18:17:04 -07001056 void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001057 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001058 void PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001059 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001060
1061 template <typename RegionType>
1062 bool ValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
1063 VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter,
1064 const char *apiName) const;
locke-lunarga19c71d2020-03-02 18:17:04 -07001065
1066 bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
1067 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001068 const VkImageBlit *pRegions, VkFilter filter) const override;
1069 bool PreCallValidateCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) const override;
locke-lunarga19c71d2020-03-02 18:17:04 -07001070
Jeff Leger178b1e52020-10-05 12:22:23 -04001071 template <typename RegionType>
1072 void RecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
1073 VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter,
1074 ResourceUsageTag tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07001075 void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
1076 VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001077 VkFilter filter) override;
1078 void PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) override;
locke-lunarg36ba2592020-04-03 09:42:04 -06001079
John Zulauffaea0ee2021-01-14 14:01:32 -07001080 bool ValidateIndirectBuffer(const CommandBufferAccessContext &cb_context, const AccessContext &context,
1081 VkCommandBuffer commandBuffer, const VkDeviceSize struct_size, const VkBuffer buffer,
1082 const VkDeviceSize offset, const uint32_t drawCount, const uint32_t stride,
locke-lunarg61870c22020-06-09 14:51:50 -06001083 const char *function) const;
1084 void RecordIndirectBuffer(AccessContext &context, const ResourceUsageTag &tag, const VkDeviceSize struct_size,
1085 const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, uint32_t stride);
locke-lunarg36ba2592020-04-03 09:42:04 -06001086
John Zulauffaea0ee2021-01-14 14:01:32 -07001087 bool ValidateCountBuffer(const CommandBufferAccessContext &cb_context, const AccessContext &context,
1088 VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, const char *function) const;
locke-lunarg61870c22020-06-09 14:51:50 -06001089 void RecordCountBuffer(AccessContext &context, const ResourceUsageTag &tag, VkBuffer buffer, VkDeviceSize offset);
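    // ValidateIndirectBuffer/RecordIndirectBuffer cover the device read of the indirect parameter buffer
    // itself (struct_size bytes per draw, drawCount entries at the given stride), while
    // ValidateCountBuffer/RecordCountBuffer cover the 4-byte draw-count read. A sketch of how an
    // indirect-draw validate hook might use them (hypothetical wiring; `cb_context` and `access_context`
    // are assumed locals obtained as in the GetAccessContext sketch above):
    //
    //     skip |= ValidateIndirectBuffer(*cb_context, *access_context, commandBuffer,
    //                                    sizeof(VkDrawIndirectCommand), buffer, offset, drawCount, stride,
    //                                    "vkCmdDrawIndirect");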
locke-lunarg93d68af2020-05-12 17:18:03 -06001090
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001091 bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const override;
1092 void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001093
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001094 bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const override;
1095 void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001096
1097 bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001098 uint32_t firstInstance) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001099 void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001100 uint32_t firstInstance) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001101
1102 bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001103 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001104 void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001105 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001106
1107 bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001108 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001109 void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001110 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001111
1112 bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001113 uint32_t drawCount, uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001114 void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001115 uint32_t drawCount, uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001116
locke-lunargff255f92020-05-13 18:53:52 -06001117 bool ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer,
1118 VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride,
1119 const char *function) const;
locke-lunarge1a67022020-04-29 00:15:36 -06001120 bool PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1121 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001122 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001123 void PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1124 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001125 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001126 bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1127 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001128 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001129 void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1130 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001131 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001132 bool PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1133 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001134 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001135 void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1136 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001137 uint32_t stride) override;
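    // The core, KHR, and AMD draw-indirect-count entry points above all funnel into the shared
    // ValidateCmdDrawIndirectCount helper, which takes the calling entry point's name for error reporting.
    // A plausible body for the KHR validate variant (sketch only):
    //
    //     return ValidateCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
    //                                         maxDrawCount, stride, "vkCmdDrawIndirectCountKHR");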
locke-lunarge1a67022020-04-29 00:15:36 -06001138
locke-lunargff255f92020-05-13 18:53:52 -06001139 bool ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1140 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
1141 uint32_t stride, const char *function) const;
locke-lunarge1a67022020-04-29 00:15:36 -06001142 bool PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1143 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001144 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001145 void PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1146 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001147 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001148 bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1149 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001150 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001151 void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1152 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001153 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001154 bool PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1155 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001156 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001157 void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1158 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001159 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001160
1161 bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1162 const VkClearColorValue *pColor, uint32_t rangeCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001163 const VkImageSubresourceRange *pRanges) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001164 void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1165 const VkClearColorValue *pColor, uint32_t rangeCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001166 const VkImageSubresourceRange *pRanges) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001167
1168 bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1169 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001170 const VkImageSubresourceRange *pRanges) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001171 void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1172 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001173 const VkImageSubresourceRange *pRanges) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001174
1175 bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
1176 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001177 VkDeviceSize stride, VkQueryResultFlags flags) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001178 void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
1179 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001180 VkQueryResultFlags flags) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001181
1182 bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001183 uint32_t data) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001184 void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001185 uint32_t data) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001186
1187 bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
1188 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001189 const VkImageResolve *pRegions) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001190
locke-lunarge1a67022020-04-29 00:15:36 -06001191 void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
1192 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001193 const VkImageResolve *pRegions) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001194
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001195 bool PreCallValidateCmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR *pResolveImageInfo) const override;
1196 void PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR *pResolveImageInfo) override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001197
locke-lunarge1a67022020-04-29 00:15:36 -06001198 bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001199 VkDeviceSize dataSize, const void *pData) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001200 void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001201 VkDeviceSize dataSize, const void *pData) override;
locke-lunargff255f92020-05-13 18:53:52 -06001202
1203 bool PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001204 VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const override;
locke-lunargff255f92020-05-13 18:53:52 -06001205 void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001206 VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) override;
John Zulauf49beb112020-11-04 16:06:31 -07001207
1208 bool PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const override;
1209 void PostCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) override;
1210
1211 bool PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const override;
1212 void PostCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) override;
1213
1214 bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
1215 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
1216 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
1217 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
1218 uint32_t imageMemoryBarrierCount,
1219 const VkImageMemoryBarrier *pImageMemoryBarriers) const override;
1220 void PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
1221 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
1222 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
1223 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
1224 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) override;
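    // The vkCmdSetEvent/ResetEvent/WaitEvents hooks above are the synchronization-validation view of event
    // scopes; they are expected to update and query the per-command-buffer SyncEventsContext (see
    // GetCurrentEventsContext in CommandBufferAccessContext), with the actual logic in the matching .cpp.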
John Zulauf9cb530d2019-09-30 14:14:10 -06001225};