/*
 * Copyright (c) 2019-2022 Valve Corporation
 * Copyright (c) 2019-2022 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 * Author: Locke Lin <locke@lunarg.com>
 * Author: Jeremy Gebben <jeremyg@lunarg.com>
 */

#pragma once

#include <limits>
#include <memory>
#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"
#include "cmd_buffer_state.h"
#include "render_pass_state.h"

class AccessContext;
class CommandBufferAccessContext;
class CommandExecutionContext;
class QueueBatchContext;
struct QueueSubmitCmdState;
class RenderPassAccessContext;
class ResourceAccessState;
struct ResourceFirstAccess;
class SyncEventsContext;
struct SyncEventState;
class SyncValidator;

using ImageRangeEncoder = subresource_adapter::ImageRangeEncoder;
using ImageRangeGen = subresource_adapter::ImageRangeGenerator;

using QueueId = uint32_t;

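// Hazard categories reported by synchronization validation. The *_AFTER_* values cover
// insufficiently synchronized accesses with a defined ordering (e.g. within a command buffer or
// queue submit order); the *_RACING_* values cover accesses with no defined ordering between them
// (e.g. across unsynchronized queues or asynchronous contexts).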
enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};

enum class SyncOrdering : uint8_t {
    kNonAttachment = 0,
    kColorAttachment = 1,
    kDepthStencilAttachment = 2,
    kRaster = 3,
    kNumOrderings = 4,
};

// Useful utilities for manipulating StageAccess parameters, suitable as a base class to save typing
struct SyncStageAccess {
    static inline SyncStageAccessFlags FlagBit(SyncStageAccessIndex stage_access) {
        return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
    }
    static inline SyncStageAccessFlags Flags(SyncStageAccessIndex stage_access) {
        return static_cast<SyncStageAccessFlags>(FlagBit(stage_access));
    }

    static bool IsRead(const SyncStageAccessFlags &stage_access_bit) { return (stage_access_bit & syncStageAccessReadMask).any(); }
    static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }

    static bool IsWrite(const SyncStageAccessFlags &stage_access_bit) {
        return (stage_access_bit & syncStageAccessWriteMask).any();
    }
    static bool HasWrite(const SyncStageAccessFlags &stage_access_mask) {
        return (stage_access_mask & syncStageAccessWriteMask).any();
    }
    static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
    static VkPipelineStageFlags2KHR PipelineStageBit(SyncStageAccessIndex stage_access_index) {
        return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
    }
    static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags2KHR stages);
    static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags2KHR access);
    static SyncStageAccessFlags AccessScope(VkPipelineStageFlags2KHR stages, VkAccessFlags2KHR access);
    static SyncStageAccessFlags AccessScope(const SyncStageAccessFlags &stage_scope, VkAccessFlags2KHR accesses) {
        return stage_scope & AccessScopeByAccess(accesses);
    }
};

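// Illustrative sketch (not part of this header): deriving a barrier's source access scope from API
// masks with the helpers above; srcStageMask and srcAccessMask are hypothetical caller variables.
//   SyncStageAccessFlags src_access_scope =
//       SyncStageAccess::AccessScope(SyncStageAccess::AccessScopeByStage(srcStageMask), srcAccessMask);
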
struct ResourceUsageRecord {
    enum class SubcommandType { kNone, kSubpassTransition, kLoadOp, kStoreOp, kResolveOp, kIndex };

    using TagIndex = size_t;
    using Count = uint32_t;
    constexpr static TagIndex kMaxIndex = std::numeric_limits<TagIndex>::max();
    constexpr static Count kMaxCount = std::numeric_limits<Count>::max();
    CMD_TYPE command = CMD_NONE;
    Count seq_num = 0U;
    SubcommandType sub_command_type = SubcommandType::kNone;
    Count sub_command = 0U;

    // This is somewhat repetitive, but it prevents the need for Exec/Submit time touchup, after which usage records can be
    // from different command buffers and resets.
    const CMD_BUFFER_STATE *cb_state = nullptr;  // plain pointer as a shared pointer is held by the context storing this record
    Count reset_count;

    ResourceUsageRecord() = default;
    ResourceUsageRecord(CMD_TYPE command_, Count seq_num_, SubcommandType sub_type_, Count sub_command_,
                        const CMD_BUFFER_STATE *cb_state_, Count reset_count_)
        : command(command_),
          seq_num(seq_num_),
          sub_command_type(sub_type_),
          sub_command(sub_command_),
          cb_state(cb_state_),
          reset_count(reset_count_) {}
};

// The resource tag index is relative to the command buffer or queue in which it's found
using ResourceUsageTag = ResourceUsageRecord::TagIndex;
using ResourceUsageRange = sparse_container::range<ResourceUsageTag>;

struct HazardResult {
    std::unique_ptr<const ResourceAccessState> access_state;
    std::unique_ptr<const ResourceFirstAccess> recorded_access;
    SyncStageAccessIndex usage_index = std::numeric_limits<SyncStageAccessIndex>::max();
    SyncHazard hazard = NONE;
    SyncStageAccessFlags prior_access = 0U;  // TODO -- change to a NONE enum in ...Bits
    ResourceUsageTag tag = ResourceUsageTag();
    void Set(const ResourceAccessState *access_state_, SyncStageAccessIndex usage_index_, SyncHazard hazard_,
             const SyncStageAccessFlags &prior_, ResourceUsageTag tag_);
    void AddRecordedAccess(const ResourceFirstAccess &first_access);
    bool IsHazard() const { return NONE != hazard; }
};

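// Illustrative sketch (not part of this header): the typical pattern for consuming a HazardResult
// returned by the Detect* members of ResourceAccessState/AccessContext; 'access' and 'usage' are
// hypothetical.
//   HazardResult hazard = access.DetectHazard(usage);
//   if (hazard.IsHazard()) { /* log the error using hazard.hazard, hazard.tag, ... */ }
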
struct SyncExecScope {
    VkPipelineStageFlags2KHR mask_param;  // the xxxStageMask parameter passed by the caller
    VkPipelineStageFlags2KHR
        expanded_mask;  // all stage bits covered by any 'catch all bits' in the parameter (eg. ALL_GRAPHICS_BIT).
    VkPipelineStageFlags2KHR exec_scope;  // all earlier or later stages that would be affected by a barrier using this scope.
    SyncStageAccessFlags valid_accesses;  // all valid accesses that can be used with this scope.

    SyncExecScope() : mask_param(0), expanded_mask(0), exec_scope(0), valid_accesses(0) {}

    static SyncExecScope MakeSrc(VkQueueFlags queue_flags, VkPipelineStageFlags2KHR src_stage_mask,
                                 const VkPipelineStageFlags2KHR disabled_feature_mask = 0);
    static SyncExecScope MakeDst(VkQueueFlags queue_flags, VkPipelineStageFlags2KHR src_stage_mask);
};

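// Illustrative sketch (not part of this header): building source/destination execution scopes from
// API stage masks; queue_flags, srcStageMask, and dstStageMask are hypothetical caller variables.
//   SyncExecScope src = SyncExecScope::MakeSrc(queue_flags, srcStageMask);
//   SyncExecScope dst = SyncExecScope::MakeDst(queue_flags, dstStageMask);
//   SyncBarrier barrier(src, dst);  // SyncBarrier is declared below
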
struct SyncBarrier {
    struct AllAccess {};
    SyncExecScope src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    SyncExecScope dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
    SyncBarrier() = default;
    SyncBarrier(const SyncBarrier &other) = default;
    SyncBarrier &operator=(const SyncBarrier &) = default;

    SyncBarrier(const SyncExecScope &src, const SyncExecScope &dst);
    SyncBarrier(const SyncExecScope &src, const SyncExecScope &dst, const AllAccess &);
    SyncBarrier(const SyncExecScope &src_exec, const SyncStageAccessFlags &src_access, const SyncExecScope &dst_exec,
                const SyncStageAccessFlags &dst_access)
        : src_exec_scope(src_exec), src_access_scope(src_access), dst_exec_scope(dst_exec), dst_access_scope(dst_access) {}

    template <typename Barrier>
    SyncBarrier(const Barrier &barrier, const SyncExecScope &src, const SyncExecScope &dst);

    SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &barrier);
    // template constructor for sync2 barriers
    template <typename Barrier>
    SyncBarrier(VkQueueFlags queue_flags, const Barrier &barrier);

    void Merge(const SyncBarrier &other) {
        // Note that after merge, only the exec_scope and access_scope fields are fully valid
        // TODO: Do we need to update any of the other fields? Merging has limited application.
        src_exec_scope.exec_scope |= other.src_exec_scope.exec_scope;
        src_access_scope |= other.src_access_scope;
        dst_exec_scope.exec_scope |= other.dst_exec_scope.exec_scope;
        dst_access_scope |= other.dst_access_scope;
    }
};

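// Address spaces used to key access tracking. kLinear addresses resources by device memory offset
// (buffers and linearly tiled images); kIdealized uses the ImageRangeEncoder's synthetic,
// subresource-ordered address space (optimally tiled images).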
enum class AccessAddressType : uint32_t { kLinear = 0, kIdealized = 1, kMaxType = 1, kTypeCount = kMaxType + 1 };

struct SemaphoreScope : SyncExecScope {
    SemaphoreScope(QueueId qid, const SyncExecScope &exec_scope) : SyncExecScope(exec_scope), queue(qid) {}
    SemaphoreScope() = default;
    QueueId queue;
};

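// Tracks the currently signaled semaphores and the queue batch contexts that signaled them. An
// instance may be constructed over a previous snapshot (prev_), letting it act as a writable
// overlay during queue submit validation without modifying the underlying state.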
class SignaledSemaphores {
  public:
    // The record of a signaled semaphore; deleted when unsignaled
    struct Signal {
        Signal() = delete;
        Signal(const Signal &other) = default;
        Signal(Signal &&other) = default;
        Signal &operator=(const Signal &other) = default;
        Signal &operator=(Signal &&other) = default;
        Signal(const std::shared_ptr<const SEMAPHORE_STATE> &sem_state_, const std::shared_ptr<QueueBatchContext> &batch_,
               const SyncExecScope &exec_scope_);

        std::shared_ptr<const SEMAPHORE_STATE> sem_state;
        std::shared_ptr<QueueBatchContext> batch;
        // Use the SyncExecScope::valid_accesses for first access scope
        SemaphoreScope first_scope;
        // TODO add timeline semaphore support.
    };
    using SignalMap = layer_data::unordered_map<VkSemaphore, std::shared_ptr<Signal>>;
    using iterator = SignalMap::iterator;
    using const_iterator = SignalMap::const_iterator;
    using mapped_type = SignalMap::mapped_type;
    iterator begin() { return signaled_.begin(); }
    const_iterator begin() const { return signaled_.begin(); }
    iterator end() { return signaled_.end(); }
    const_iterator end() const { return signaled_.end(); }

    bool SignalSemaphore(const std::shared_ptr<const SEMAPHORE_STATE> &sem_state, const std::shared_ptr<QueueBatchContext> &batch,
                         const VkSemaphoreSubmitInfo &signal_info);
    std::shared_ptr<const Signal> Unsignal(VkSemaphore);
    void Import(VkSemaphore sem, std::shared_ptr<Signal> &&move_from);
    void Reset();
    SignaledSemaphores() : prev_(nullptr) {}
    SignaledSemaphores(const SignaledSemaphores &prev) : prev_(&prev) {}

  private:
    std::shared_ptr<const Signal> GetPrev(VkSemaphore sem) const;
    layer_data::unordered_map<VkSemaphore, std::shared_ptr<Signal>> signaled_;
    const SignaledSemaphores *prev_;  // Allowing this type to act as a writable overlay
};

struct ResourceFirstAccess {
    ResourceUsageTag tag;
    SyncStageAccessIndex usage_index;
    SyncOrdering ordering_rule;
    ResourceFirstAccess(ResourceUsageTag tag_, SyncStageAccessIndex usage_index_, SyncOrdering ordering_rule_)
        : tag(tag_), usage_index(usage_index_), ordering_rule(ordering_rule_){};
    ResourceFirstAccess(const ResourceFirstAccess &other) = default;
    ResourceFirstAccess(ResourceFirstAccess &&other) = default;
    ResourceFirstAccess &operator=(const ResourceFirstAccess &rhs) = default;
    ResourceFirstAccess &operator=(ResourceFirstAccess &&rhs) = default;
    bool operator==(const ResourceFirstAccess &rhs) const {
        return (tag == rhs.tag) && (usage_index == rhs.usage_index) && (ordering_rule == rhs.ordering_rule);
    }
};

using QueueId = uint32_t;
class ResourceAccessState : public SyncStageAccess {
  protected:
    struct OrderingBarrier {
        VkPipelineStageFlags2KHR exec_scope;
        SyncStageAccessFlags access_scope;
        OrderingBarrier() = default;
        OrderingBarrier(const OrderingBarrier &) = default;
        OrderingBarrier(VkPipelineStageFlags2KHR es, SyncStageAccessFlags as) : exec_scope(es), access_scope(as) {}
        OrderingBarrier &operator=(const OrderingBarrier &) = default;
        OrderingBarrier &operator|=(const OrderingBarrier &rhs) {
            exec_scope |= rhs.exec_scope;
            access_scope |= rhs.access_scope;
            return *this;
        }
    };
    using OrderingBarriers = std::array<OrderingBarrier, static_cast<size_t>(SyncOrdering::kNumOrderings)>;
    using FirstAccesses = small_vector<ResourceFirstAccess, 3>;

    // Multiple read operations can be simultaneously (and independently) synchronized,
    // given that only the second execution scope creates a dependency chain. We have to track each,
    // but only up to one per pipeline stage (as another read from the *same* stage becomes the more recent,
    // and applicable, one for hazard detection).
    struct ReadState {
        VkPipelineStageFlags2KHR stage;  // The stage of this read
        SyncStageAccessFlags access;     // TODO: Change to FlagBits when we have a None bit enum
        // TODO: Revisit whether this needs to support multiple reads per stage
        VkPipelineStageFlags2KHR barriers;     // all applicable barriered stages
        VkPipelineStageFlags2KHR sync_stages;  // reads known to have happened after this
        ResourceUsageTag tag;
        QueueId queue;
        VkPipelineStageFlags2KHR pending_dep_chain;  // Should be zero except during barrier application
        // Excluded from comparison
        ReadState() = default;
        ReadState(VkPipelineStageFlags2KHR stage_, SyncStageAccessFlags access_, VkPipelineStageFlags2KHR barriers_,
                  ResourceUsageTag tag_);
        bool operator==(const ReadState &rhs) const {
            bool same = (stage == rhs.stage) && (access == rhs.access) && (barriers == rhs.barriers) && (tag == rhs.tag);
            return same;
        }
        bool IsReadBarrierHazard(VkPipelineStageFlags2KHR src_exec_scope) const {
            // If the read stage is not in the src sync scope
            // *AND* not execution chained with an existing sync barrier (that's the or)
            // then the barrier access is unsafe (R/W after R)
            return (src_exec_scope & (stage | barriers)) == 0;
        }
        bool IsReadBarrierHazard(QueueId barrier_queue, VkPipelineStageFlags2KHR src_exec_scope) const {
            // If the read stage is not in the src sync scope
            // *AND* not execution chained with an existing sync barrier (that's the or)
            // then the barrier access is unsafe (R/W after R)
            VkPipelineStageFlags2 queue_ordered_stage = (queue == barrier_queue) ? stage : VK_PIPELINE_STAGE_2_NONE;
            return (src_exec_scope & (queue_ordered_stage | barriers)) == 0;
        }

        bool operator!=(const ReadState &rhs) const { return !(*this == rhs); }
        void Set(VkPipelineStageFlags2KHR stage_, const SyncStageAccessFlags &access_, VkPipelineStageFlags2KHR barriers_,
                 ResourceUsageTag tag_);
        bool ReadInScopeOrChain(VkPipelineStageFlags2 exec_scope) const { return (exec_scope & (stage | barriers)) != 0; }
        bool ReadInQueueScopeOrChain(QueueId queue, VkPipelineStageFlags2 exec_scope) const;
        bool ReadInEventScope(VkPipelineStageFlags2 exec_scope, QueueId scope_queue, ResourceUsageTag scope_tag) const {
            // If this read is the same one we included in the set event and in scope, then apply the execution barrier...
            // NOTE: That's not really correct... this read stage might *not* have been included in the setevent, and the barriers
            // representing the chain might have changed since then (that would be an odd usage), so as a first approximation
            // we'll assume the barriers *haven't* been changed since (if the tag hasn't), and while this could be a false
            // positive in the case of Set; SomeBarrier; Wait; we'll live with it until we can add more state to the first scope
            // capture (the specific write and read stages that *were* in scope at the moment of SetEvents).
            return (tag < scope_tag) && ReadInQueueScopeOrChain(scope_queue, exec_scope);
        }
    };

  public:
    HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, SyncOrdering ordering_rule, QueueId queue_id) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, const OrderingBarrier &ordering, QueueId queue_id) const;
    HazardResult DetectHazard(const ResourceAccessState &recorded_use, QueueId queue_id, const ResourceUsageRange &tag_range) const;

    HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index, ResourceUsageTag start_tag) const;
    HazardResult DetectAsyncHazard(const ResourceAccessState &recorded_use, const ResourceUsageRange &tag_range,
                                   ResourceUsageTag start_tag) const;

    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, QueueId queue_id, VkPipelineStageFlags2KHR source_exec_scope,
                                     const SyncStageAccessFlags &source_access_scope) const;
    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, const ResourceAccessState &scope_state,
                                     VkPipelineStageFlags2KHR source_exec_scope, const SyncStageAccessFlags &source_access_scope,
                                     QueueId event_queue, ResourceUsageTag event_tag) const;

    void Update(SyncStageAccessIndex usage_index, SyncOrdering ordering_rule, ResourceUsageTag tag);
    void SetWrite(const SyncStageAccessFlags &usage_bit, ResourceUsageTag tag);
    void ClearWrite();
    void ClearRead();
    void Resolve(const ResourceAccessState &other);
    void ApplyBarriers(const std::vector<SyncBarrier> &barriers, bool layout_transition);
    void ApplyBarriersImmediate(const std::vector<SyncBarrier> &barriers);
    template <typename ScopeOps>
    void ApplyBarrier(ScopeOps &&scope, const SyncBarrier &barrier, bool layout_transition);
    void ApplyPendingBarriers(ResourceUsageTag tag);
    void ApplySemaphore(const SemaphoreScope &signal, const SemaphoreScope wait);

    struct QueueTagPredicate {
        QueueId queue;
        ResourceUsageTag tag;
        bool operator()(QueueId usage_queue, ResourceUsageTag usage_tag) const;
    };

    struct QueuePredicate {
        QueueId queue;
        QueuePredicate(QueueId queue_) : queue(queue_) {}
        bool operator()(QueueId usage_queue, ResourceUsageTag usage_tag) const;
    };
    struct TagPredicate {
        ResourceUsageTag tag;
        bool operator()(QueueId usage_queue, ResourceUsageTag usage_tag) const;
    };

    template <typename Pred>
    bool ApplyQueueTagWait(Pred &&);
    bool FirstAccessInTagRange(const ResourceUsageRange &tag_range) const;

    void OffsetTag(ResourceUsageTag offset);
    ResourceAccessState();

    bool HasPendingState() const {
        return (0 != pending_layout_transition) || pending_write_barriers.any() || (0 != pending_write_dep_chain);
    }
    bool HasWriteOp() const { return last_write != 0; }
    bool operator==(const ResourceAccessState &rhs) const {
        bool same = (write_barriers == rhs.write_barriers) && (write_dependency_chain == rhs.write_dependency_chain) &&
                    (last_reads == rhs.last_reads) && (last_read_stages == rhs.last_read_stages) && (write_tag == rhs.write_tag) &&
                    (input_attachment_read == rhs.input_attachment_read) &&
                    (read_execution_barriers == rhs.read_execution_barriers) && (first_accesses_ == rhs.first_accesses_);
        return same;
    }
    bool operator!=(const ResourceAccessState &rhs) const { return !(*this == rhs); }
    VkPipelineStageFlags2KHR GetReadBarriers(const SyncStageAccessFlags &usage) const;
    SyncStageAccessFlags GetWriteBarriers() const { return write_barriers; }
    bool InSourceScopeOrChain(VkPipelineStageFlags2KHR src_exec_scope, SyncStageAccessFlags src_access_scope) const {
        return ReadInSourceScopeOrChain(src_exec_scope) || WriteInSourceScopeOrChain(src_exec_scope, src_access_scope);
    }
    void SetQueueId(QueueId id);

    bool WriteInChain(VkPipelineStageFlags2KHR src_exec_scope) const;
    bool WriteInScope(const SyncStageAccessFlags &src_access_scope) const;
    bool WriteBarrierInScope(const SyncStageAccessFlags &src_access_scope) const;
    bool WriteInChainedScope(VkPipelineStageFlags2KHR src_exec_scope, const SyncStageAccessFlags &src_access_scope) const;
    bool WriteInSourceScopeOrChain(VkPipelineStageFlags2KHR src_exec_scope, SyncStageAccessFlags src_access_scope) const;
    bool WriteInQueueSourceScopeOrChain(QueueId queue, VkPipelineStageFlags2KHR src_exec_scope,
                                        SyncStageAccessFlags src_access_scope) const;
    bool WriteInEventScope(VkPipelineStageFlags2KHR src_exec_scope, const SyncStageAccessFlags &src_access_scope,
                           QueueId scope_queue, ResourceUsageTag scope_tag) const;

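    // The *ScopeOps structs below are policy objects for the ApplyBarrier template above. Each
    // answers "is this prior write/read in the barrier's first (source) scope?" under a different
    // ordering model: UntaggedScopeOps within a single command buffer, QueueScopeOps at queue
    // submit time, and EventScopeOps against the first scope captured by a SetEvent.
    // Illustrative sketch (hypothetical caller state):
    //   access.ApplyBarrier(QueueScopeOps(queue_id), barrier, false /* layout_transition */);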
405 struct UntaggedScopeOps {
406 bool WriteInScope(const SyncBarrier &barrier, const ResourceAccessState &access) const {
407 return access.WriteInSourceScopeOrChain(barrier.src_exec_scope.exec_scope, barrier.src_access_scope);
408 }
409 bool ReadInScope(const SyncBarrier &barrier, const ReadState &read_state) const {
410 return read_state.ReadInScopeOrChain(barrier.src_exec_scope.exec_scope);
411 }
412 };
John Zulauf00119522022-05-23 19:07:42 -0600413
414 struct QueueScopeOps {
415 bool WriteInScope(const SyncBarrier &barrier, const ResourceAccessState &access) const {
416 return access.WriteInQueueSourceScopeOrChain(queue, barrier.src_exec_scope.exec_scope, barrier.src_access_scope);
417 }
418 bool ReadInScope(const SyncBarrier &barrier, const ReadState &read_state) const {
419 return read_state.ReadInQueueScopeOrChain(queue, barrier.src_exec_scope.exec_scope);
420 }
421 QueueScopeOps(QueueId scope_queue) : queue(scope_queue) {}
422 QueueId queue;
423 };
424
John Zulaufb7578302022-05-19 13:50:18 -0600425 struct EventScopeOps {
426 bool WriteInScope(const SyncBarrier &barrier, const ResourceAccessState &access) const {
John Zulaufe0757ba2022-06-10 16:51:45 -0600427 return access.WriteInEventScope(barrier.src_exec_scope.exec_scope, barrier.src_access_scope, scope_queue, scope_tag);
John Zulaufb7578302022-05-19 13:50:18 -0600428 }
429 bool ReadInScope(const SyncBarrier &barrier, const ReadState &read_state) const {
John Zulaufe0757ba2022-06-10 16:51:45 -0600430 return read_state.ReadInEventScope(barrier.src_exec_scope.exec_scope, scope_queue, scope_tag);
John Zulaufb7578302022-05-19 13:50:18 -0600431 }
John Zulaufe0757ba2022-06-10 16:51:45 -0600432 EventScopeOps(QueueId qid, ResourceUsageTag event_tag) : scope_queue(qid), scope_tag(event_tag) {}
433 QueueId scope_queue;
John Zulaufb7578302022-05-19 13:50:18 -0600434 ResourceUsageTag scope_tag;
435 };
436
  private:
    static constexpr VkPipelineStageFlags2KHR kInvalidAttachmentStage = ~VkPipelineStageFlags2KHR(0);
    bool IsWriteHazard(SyncStageAccessFlags usage) const { return (usage & ~write_barriers).any(); }
    bool IsRAWHazard(VkPipelineStageFlags2KHR usage_stage, const SyncStageAccessFlags &usage) const;

    // This form is only valid when queue submit order is known...
    bool IsWriteBarrierHazard(VkPipelineStageFlags2KHR src_exec_scope, const SyncStageAccessFlags &src_access_scope) const {
        // If the previous write is *not* a layout transition
        // *AND* is *not* in the 1st access scope
        // *AND* the current barrier is not in the dependency chain
        // *AND* there is no prior memory barrier for the previous write in the dependency chain
        // then the barrier access is unsafe (R/W after W)
        return (last_write != SYNC_IMAGE_LAYOUT_TRANSITION_BIT) && !WriteInScope(src_access_scope) &&
               !WriteInChainedScope(src_exec_scope, src_access_scope);
    }

    bool IsWriteBarrierHazard(QueueId queue_id, VkPipelineStageFlags2KHR src_exec_scope,
                              const SyncStageAccessFlags &src_access_scope) const {
        if (queue_id == write_queue) {
            return IsWriteBarrierHazard(src_exec_scope, src_access_scope);
        }
        // Accesses with queue submit or...
        // If the last access is a layout transition, then exec_scope is all that is needed, otherwise access scope is needed
        if (last_write == SYNC_IMAGE_LAYOUT_TRANSITION_BIT) {
            return !WriteInChain(src_exec_scope);
        }
        return !WriteInChainedScope(src_exec_scope, src_access_scope);
    }
    bool ReadInSourceScopeOrChain(VkPipelineStageFlags2KHR src_exec_scope) const {
        return (0 != (src_exec_scope & (last_read_stages | read_execution_barriers)));
    }

    static bool IsReadHazard(VkPipelineStageFlags2KHR stage_mask, const VkPipelineStageFlags2KHR barriers) {
        return stage_mask != (stage_mask & barriers);
    }

    bool IsReadHazard(VkPipelineStageFlags2KHR stage_mask, const ReadState &read_access) const {
        return IsReadHazard(stage_mask, read_access.barriers);
    }
    VkPipelineStageFlags2 GetOrderedStages(QueueId queue_id, const OrderingBarrier &ordering) const;

    void UpdateFirst(ResourceUsageTag tag, SyncStageAccessIndex usage_index, SyncOrdering ordering_rule);
    void TouchupFirstForLayoutTransition(ResourceUsageTag tag, const OrderingBarrier &layout_ordering);

    static const OrderingBarrier &GetOrderingRules(SyncOrdering ordering_enum) {
        return kOrderingRules[static_cast<size_t>(ordering_enum)];
    }

    // TODO: Add a NONE (zero) enum to SyncStageAccessFlags for input_attachment_read and last_write

    // With reads, each must be "safe" relative to its prior write, so we need only
    // save the most recent write operation (as anything *transitively* unsafe would already
    // be included)
    SyncStageAccessFlags write_barriers;              // union of applicable barrier masks since last write
    VkPipelineStageFlags2KHR write_dependency_chain;  // initially zero, but accumulating the dstStages of barriers if they chain.
    ResourceUsageTag write_tag;
    QueueId write_queue;
    SyncStageAccessFlags last_write;  // only the most recent write

    // TODO Input Attachment cleanup for multiple reads in a given stage
    // Tracks whether the fragment shader read is input attachment read
    bool input_attachment_read;

    VkPipelineStageFlags2KHR last_read_stages;
    VkPipelineStageFlags2KHR read_execution_barriers;
    using ReadStates = small_vector<ReadState, 3, uint32_t>;
    ReadStates last_reads;

    // Pending execution state to support independent parallel barriers
    VkPipelineStageFlags2KHR pending_write_dep_chain;
    bool pending_layout_transition;
    SyncStageAccessFlags pending_write_barriers;
    OrderingBarrier pending_layout_ordering_;
    FirstAccesses first_accesses_;
    VkPipelineStageFlags2KHR first_read_stages_;
    OrderingBarrier first_write_layout_ordering_;

    static OrderingBarriers kOrderingRules;
};
using ResourceAccessStateFunction = std::function<void(ResourceAccessState *)>;
using ResourceAccessStateConstFunction = std::function<void(const ResourceAccessState &)>;

using ResourceAddress = VkDeviceSize;
using ResourceAccessRangeMap = sparse_container::range_map<ResourceAddress, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;
using ResourceAccessRangeIndex = typename ResourceAccessRange::index_type;
using ResourceRangeMergeIterator = sparse_container::parallel_iterator<ResourceAccessRangeMap, const ResourceAccessRangeMap>;

struct FenceSyncState {
    std::shared_ptr<const FENCE_STATE> fence;
    ResourceUsageTag tag;
    QueueId queue_id;
    FenceSyncState();
    FenceSyncState(const FenceSyncState &other) = default;
    FenceSyncState(FenceSyncState &&other) = default;
    FenceSyncState &operator=(const FenceSyncState &other) = default;
    FenceSyncState &operator=(FenceSyncState &&other) = default;

    FenceSyncState(const std::shared_ptr<const FENCE_STATE> &fence_, ResourceUsageTag tag_, QueueId queue_id_)
        : fence(fence_), tag(tag_), queue_id(queue_id_) {}
};

class AttachmentViewGen {
  public:
    enum Gen { kViewSubresource = 0, kRenderArea = 1, kDepthOnlyRenderArea = 2, kStencilOnlyRenderArea = 3, kGenSize = 4 };
    AttachmentViewGen(const IMAGE_VIEW_STATE *view_, const VkOffset3D &offset, const VkExtent3D &extent);
    AttachmentViewGen(const AttachmentViewGen &other) = default;
    AttachmentViewGen(AttachmentViewGen &&other) = default;
    AccessAddressType GetAddressType() const;
    const IMAGE_VIEW_STATE *GetViewState() const { return view_; }
    const ImageRangeGen *GetRangeGen(Gen type) const;
    bool IsValid() const { return gen_store_[Gen::kViewSubresource]; }
    Gen GetDepthStencilRenderAreaGenType(bool depth_op, bool stencil_op) const;

  private:
    using RangeGenStore = layer_data::optional<ImageRangeGen>;
    const IMAGE_VIEW_STATE *view_ = nullptr;
    VkImageAspectFlags view_mask_ = 0U;
    std::array<RangeGenStore, Gen::kGenSize> gen_store_;
};

using AttachmentViewGenVector = std::vector<AttachmentViewGen>;

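// Validator-side captures of the API barrier structures, with buffer/image state references and
// sync scopes resolved for hazard checking.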
using SyncMemoryBarrier = SyncBarrier;
struct SyncBufferMemoryBarrier {
    using Buffer = std::shared_ptr<const BUFFER_STATE>;
    Buffer buffer;
    SyncBarrier barrier;
    ResourceAccessRange range;
    bool IsLayoutTransition() const { return false; }
    const ResourceAccessRange &Range() const { return range; };
    const BUFFER_STATE *GetState() const { return buffer.get(); }
    SyncBufferMemoryBarrier(const Buffer &buffer_, const SyncBarrier &barrier_, const ResourceAccessRange &range_)
        : buffer(buffer_), barrier(barrier_), range(range_) {}
    SyncBufferMemoryBarrier() = default;
};

struct SyncImageMemoryBarrier {
    using Image = std::shared_ptr<const IMAGE_STATE>;

    Image image;
    uint32_t index;
    SyncBarrier barrier;
    VkImageLayout old_layout;
    VkImageLayout new_layout;
    VkImageSubresourceRange range;

    bool IsLayoutTransition() const { return old_layout != new_layout; }
    const VkImageSubresourceRange &Range() const { return range; };
    const IMAGE_STATE *GetState() const { return image.get(); }
    SyncImageMemoryBarrier(const Image &image_, uint32_t index_, const SyncBarrier &barrier_, VkImageLayout old_layout_,
                           VkImageLayout new_layout_, const VkImageSubresourceRange &subresource_range_)
        : image(image_),
          index(index_),
          barrier(barrier_),
          old_layout(old_layout_),
          new_layout(new_layout_),
          range(subresource_range_) {}
    SyncImageMemoryBarrier() = default;
};

template <typename SubpassNode>
struct SubpassBarrierTrackback {
    std::vector<SyncBarrier> barriers;
    const SubpassNode *source_subpass = nullptr;
    SubpassBarrierTrackback() = default;
    SubpassBarrierTrackback(const SubpassBarrierTrackback &) = default;
    SubpassBarrierTrackback(const SubpassNode *source_subpass_, VkQueueFlags queue_flags_,
                            const std::vector<const VkSubpassDependency2 *> &subpass_dependencies_)
        : barriers(), source_subpass(source_subpass_) {
        barriers.reserve(subpass_dependencies_.size());
        for (const VkSubpassDependency2 *dependency : subpass_dependencies_) {
            assert(dependency);
            barriers.emplace_back(queue_flags_, *dependency);
        }
    }
    SubpassBarrierTrackback(const SubpassNode *source_subpass_, const SyncBarrier &barrier_)
        : barriers(1, barrier_), source_subpass(source_subpass_) {}
    SubpassBarrierTrackback &operator=(const SubpassBarrierTrackback &) = default;
};

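// SyncOp* classes capture the state of a recorded synchronization command so it can be validated
// and applied both at command buffer record time (Validate/Record) and again when the command
// buffer is replayed into another context (ReplayValidate/ReplayRecord), e.g. secondary command
// buffer execution or queue submission, with recorded tags rebased by base_tag.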
class SyncOpBase {
  public:
    SyncOpBase() : cmd_type_(CMD_NONE) {}
    SyncOpBase(CMD_TYPE cmd_type) : cmd_type_(cmd_type) {}
    virtual ~SyncOpBase() = default;

    const char *CmdName() const { return CommandTypeString(cmd_type_); }

    virtual bool Validate(const CommandBufferAccessContext &cb_context) const = 0;
    virtual ResourceUsageTag Record(CommandBufferAccessContext *cb_context) = 0;
    virtual bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                                ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const = 0;
    virtual void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const = 0;

  protected:
    // Only non-null and valid for SyncOps within a render pass instance. WIP -- think about how to manage for non-RPI calls
    // within RPI and secondaries...
    uint32_t subpass_ = VK_SUBPASS_EXTERNAL;
    CMD_TYPE cmd_type_;
};

class SyncOpBarriers : public SyncOpBase {
  protected:
    template <typename Barriers, typename FunctorFactory>
    static void ApplyBarriers(const Barriers &barriers, const FunctorFactory &factory, QueueId queue_id, ResourceUsageTag tag,
                              AccessContext *context);
    template <typename Barriers, typename FunctorFactory>
    static void ApplyGlobalBarriers(const Barriers &barriers, const FunctorFactory &factory, QueueId queue_id, ResourceUsageTag tag,
                                    AccessContext *access_context);

    SyncOpBarriers(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, VkPipelineStageFlags srcStageMask,
                   VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
                   const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                   const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                   const VkImageMemoryBarrier *pImageMemoryBarriers);
    SyncOpBarriers(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, uint32_t event_count,
                   const VkDependencyInfoKHR *pDependencyInfo);

    ~SyncOpBarriers() override = default;

  protected:
    struct BarrierSet {
        VkDependencyFlags dependency_flags;
        SyncExecScope src_exec_scope;
        SyncExecScope dst_exec_scope;
        std::vector<SyncMemoryBarrier> memory_barriers;
        std::vector<SyncBufferMemoryBarrier> buffer_memory_barriers;
        std::vector<SyncImageMemoryBarrier> image_memory_barriers;
        bool single_exec_scope;
        void MakeMemoryBarriers(const SyncExecScope &src, const SyncExecScope &dst, VkDependencyFlags dependencyFlags,
                                uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers);
        void MakeBufferMemoryBarriers(const SyncValidator &sync_state, const SyncExecScope &src, const SyncExecScope &dst,
                                      VkDependencyFlags dependencyFlags, uint32_t bufferMemoryBarrierCount,
                                      const VkBufferMemoryBarrier *pBufferMemoryBarriers);
        void MakeImageMemoryBarriers(const SyncValidator &sync_state, const SyncExecScope &src, const SyncExecScope &dst,
                                     VkDependencyFlags dependencyFlags, uint32_t imageMemoryBarrierCount,
                                     const VkImageMemoryBarrier *pImageMemoryBarriers);
        void MakeMemoryBarriers(VkQueueFlags queue_flags, VkDependencyFlags dependency_flags, uint32_t barrier_count,
                                const VkMemoryBarrier2 *barriers);
        void MakeBufferMemoryBarriers(const SyncValidator &sync_state, VkQueueFlags queue_flags, VkDependencyFlags dependency_flags,
                                      uint32_t barrier_count, const VkBufferMemoryBarrier2 *barriers);
        void MakeImageMemoryBarriers(const SyncValidator &sync_state, VkQueueFlags queue_flags, VkDependencyFlags dependency_flags,
                                     uint32_t barrier_count, const VkImageMemoryBarrier2 *barriers);
    };
    std::vector<BarrierSet> barriers_;
};

class SyncOpPipelineBarrier : public SyncOpBarriers {
  public:
    SyncOpPipelineBarrier(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags,
                          VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                          uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                          const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                          const VkImageMemoryBarrier *pImageMemoryBarriers);
    SyncOpPipelineBarrier(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags,
                          const VkDependencyInfoKHR &pDependencyInfo);
    ~SyncOpPipelineBarrier() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;
};

class SyncOpWaitEvents : public SyncOpBarriers {
  public:
    SyncOpWaitEvents(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, uint32_t eventCount,
                     const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                     uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                     const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                     const VkImageMemoryBarrier *pImageMemoryBarriers);

    SyncOpWaitEvents(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, uint32_t eventCount,
                     const VkEvent *pEvents, const VkDependencyInfoKHR *pDependencyInfo);
    ~SyncOpWaitEvents() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;

  protected:
    static const char *const kIgnored;
    bool DoValidate(const CommandExecutionContext &ex_context, const ResourceUsageTag base_tag) const;
    // TODO PHASE2 This is the wrong thing to use for "replay".. as the event state will have moved on since the record
    // TODO PHASE2 May need to capture by value w.r.t. "first use" or build up in calling/enqueue context through replay.
    std::vector<std::shared_ptr<const EVENT_STATE>> events_;
    void MakeEventsList(const SyncValidator &sync_state, uint32_t event_count, const VkEvent *events);
};

class SyncOpResetEvent : public SyncOpBase {
  public:
    SyncOpResetEvent(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, VkEvent event,
                     VkPipelineStageFlags2KHR stageMask);
    ~SyncOpResetEvent() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;

  private:
    bool DoValidate(const CommandExecutionContext &ex_context, const ResourceUsageTag base_tag) const;
    std::shared_ptr<const EVENT_STATE> event_;
    SyncExecScope exec_scope_;
};

class SyncOpSetEvent : public SyncOpBase {
  public:
    SyncOpSetEvent(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, VkEvent event,
                   VkPipelineStageFlags2KHR stageMask, const AccessContext *access_context);
    SyncOpSetEvent(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, VkEvent event,
                   const VkDependencyInfoKHR &dep_info, const AccessContext *access_context);
    ~SyncOpSetEvent() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;

  private:
    bool DoValidate(const CommandExecutionContext &ex_context, const ResourceUsageTag base_tag) const;
    void DoRecord(QueueId queue_id, ResourceUsageTag recorded_tag, const std::shared_ptr<const AccessContext> &access_context,
                  SyncEventsContext *events_context) const;
    std::shared_ptr<const EVENT_STATE> event_;
    // The access context of the command buffer at the time the set event was recorded
    std::shared_ptr<const AccessContext> recorded_context_;
    SyncExecScope src_exec_scope_;
    // Note that the dep info is *not* dehandled, but retained for comparison with a future WaitEvents2
    std::shared_ptr<safe_VkDependencyInfo> dep_info_;
};

class SyncOpBeginRenderPass : public SyncOpBase {
  public:
    SyncOpBeginRenderPass(CMD_TYPE cmd_type, const SyncValidator &sync_state, const VkRenderPassBeginInfo *pRenderPassBegin,
                          const VkSubpassBeginInfo *pSubpassBeginInfo);
    ~SyncOpBeginRenderPass() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;
    const RenderPassAccessContext *GetRenderPassAccessContext() const { return rp_context_; }

  protected:
    safe_VkRenderPassBeginInfo renderpass_begin_info_;
    safe_VkSubpassBeginInfo subpass_begin_info_;
    std::vector<std::shared_ptr<const IMAGE_VIEW_STATE>> shared_attachments_;
    std::vector<const IMAGE_VIEW_STATE *> attachments_;
    std::shared_ptr<const RENDER_PASS_STATE> rp_state_;
    const RenderPassAccessContext *rp_context_;
};

796class SyncOpNextSubpass : public SyncOpBase {
797 public:
sjfricke0bea06e2022-06-05 09:22:26 +0900798 SyncOpNextSubpass(CMD_TYPE cmd_type, const SyncValidator &sync_state, const VkSubpassBeginInfo *pSubpassBeginInfo,
sfricke-samsung85584a72021-09-30 21:43:38 -0700799 const VkSubpassEndInfo *pSubpassEndInfo);
John Zulauf8eda1562021-04-13 17:06:41 -0600800 ~SyncOpNextSubpass() override = default;
801
John Zulauf64ffe552021-02-06 10:25:07 -0700802 bool Validate(const CommandBufferAccessContext &cb_context) const override;
John Zulaufdab327f2022-07-08 12:02:05 -0600803 ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
John Zulauf8eda1562021-04-13 17:06:41 -0600804 bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
John Zulauf0223f142022-07-06 09:05:39 -0600805 ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
806 void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;
John Zulauf64ffe552021-02-06 10:25:07 -0700807
808 protected:
809 safe_VkSubpassBeginInfo subpass_begin_info_;
810 safe_VkSubpassEndInfo subpass_end_info_;
811};
812
813class SyncOpEndRenderPass : public SyncOpBase {
814 public:
sjfricke0bea06e2022-06-05 09:22:26 +0900815 SyncOpEndRenderPass(CMD_TYPE cmd_type, const SyncValidator &sync_state, const VkSubpassEndInfo *pSubpassEndInfo);
John Zulauf8eda1562021-04-13 17:06:41 -0600816 ~SyncOpEndRenderPass() override = default;
817
John Zulauf64ffe552021-02-06 10:25:07 -0700818 bool Validate(const CommandBufferAccessContext &cb_context) const override;
John Zulaufdab327f2022-07-08 12:02:05 -0600819 ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
John Zulauf8eda1562021-04-13 17:06:41 -0600820 bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
John Zulauf0223f142022-07-06 09:05:39 -0600821 ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
822 void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;
John Zulauf64ffe552021-02-06 10:25:07 -0700823
824 protected:
825 safe_VkSubpassEndInfo subpass_end_info_;
826};
827
John Zulauf540266b2020-04-06 18:54:53 -0600828class AccessContext {
John Zulauf5c5e88d2019-12-26 11:22:02 -0700829 public:
John Zulauf69133422020-05-20 14:55:53 -0600830 enum DetectOptions : uint32_t {
John Zulauf355e49b2020-04-24 15:11:15 -0600831 kDetectPrevious = 1U << 0,
832 kDetectAsync = 1U << 1,
833 kDetectAll = (kDetectPrevious | kDetectAsync)
John Zulauf16adfc92020-04-08 10:28:33 -0600834 };
John Zulauf1d5f9c12022-05-13 14:51:08 -0600835 struct AddressRange {
836 AccessAddressType type;
837 ResourceAccessRange range;
838 AddressRange() = default; // the explicit constructor below isn't needed in C++20, but declaring it deletes the implicit default.
839 AddressRange(AccessAddressType type_, ResourceAccessRange range_) : type(type_), range(range_) {}
840 };
John Zulauf43cc7462020-12-03 12:33:12 -0700841 using MapArray = std::array<ResourceAccessRangeMap, static_cast<size_t>(AccessAddressType::kTypeCount)>;
John Zulauf16adfc92020-04-08 10:28:33 -0600842
John Zulaufbb890452021-12-14 11:30:18 -0700843 using TrackBack = SubpassBarrierTrackback<AccessContext>;
John Zulauf5c5e88d2019-12-26 11:22:02 -0700844
John Zulauf355e49b2020-04-24 15:11:15 -0600845 HazardResult DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
John Zulauf540266b2020-04-06 18:54:53 -0600846 HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
Aitor Camachoe67f2c72022-06-08 14:41:58 +0200847 const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
848 bool is_depth_sliced) const;
John Zulauf69133422020-05-20 14:55:53 -0600849 template <typename Detector>
John Zulaufd0ec59f2021-03-13 14:25:08 -0700850 HazardResult DetectHazard(Detector &detector, const AttachmentViewGen &view_gen, AttachmentViewGen::Gen gen_type,
851 DetectOptions options) const;
852 template <typename Detector>
John Zulauf69133422020-05-20 14:55:53 -0600853 HazardResult DetectHazard(Detector &detector, const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
Aitor Camachoe67f2c72022-06-08 14:41:58 +0200854 const VkOffset3D &offset, const VkExtent3D &extent, bool is_depth_sliced,
855 DetectOptions options) const;
John Zulauf110413c2021-03-20 05:38:38 -0600856 template <typename Detector>
857 HazardResult DetectHazard(Detector &detector, const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
Aitor Camachoe67f2c72022-06-08 14:41:58 +0200858 bool is_depth_sliced, DetectOptions options) const;
John Zulauf1507ee42020-05-18 11:33:09 -0600859 HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
Aitor Camachoe67f2c72022-06-08 14:41:58 +0200860 const VkImageSubresourceRange &subresource_range, bool is_depth_sliced) const;
John Zulaufd0ec59f2021-03-13 14:25:08 -0700861 HazardResult DetectHazard(const AttachmentViewGen &view_gen, AttachmentViewGen::Gen gen_type,
862 SyncStageAccessIndex current_usage, SyncOrdering ordering_rule) const;
863
John Zulauf69133422020-05-20 14:55:53 -0600864 HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
John Zulauf8e3c3e92021-01-06 11:19:36 -0700865 const VkImageSubresourceRange &subresource_range, SyncOrdering ordering_rule,
Aitor Camachoe67f2c72022-06-08 14:41:58 +0200866 const VkOffset3D &offset, const VkExtent3D &extent, bool is_depth_sliced) const;
John Zulaufe0757ba2022-06-10 16:51:45 -0600867 HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
868 VkPipelineStageFlags2KHR src_exec_scope, const SyncStageAccessFlags &src_access_scope,
869 QueueId queue_id, const SyncEventState &sync_event, DetectOptions options) const;
John Zulaufd0ec59f2021-03-13 14:25:08 -0700870 HazardResult DetectImageBarrierHazard(const AttachmentViewGen &attachment_view, const SyncBarrier &barrier,
871 DetectOptions options) const;
Jeremy Gebben40a22942020-12-22 14:22:06 -0700872 HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags2KHR src_exec_scope,
John Zulauf4a6105a2020-11-17 15:11:05 -0700873 const SyncStageAccessFlags &src_access_scope,
Jeremy Gebbend0de1f82020-11-09 08:21:07 -0700874 const VkImageSubresourceRange &subresource_range, DetectOptions options) const;
John Zulaufe7f6a5e2021-01-16 14:31:18 -0700875 HazardResult DetectImageBarrierHazard(const SyncImageMemoryBarrier &image_barrier) const;
John Zulaufd0ec59f2021-03-13 14:25:08 -0700876 HazardResult DetectSubpassTransitionHazard(const TrackBack &track_back, const AttachmentViewGen &attach_view) const;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600877
John Zulaufb02c1eb2020-10-06 16:33:36 -0600878 void RecordLayoutTransitions(const RENDER_PASS_STATE &rp_state, uint32_t subpass,
John Zulauf14940722021-04-12 15:19:02 -0600879 const AttachmentViewGenVector &attachment_views, ResourceUsageTag tag);
John Zulaufb02c1eb2020-10-06 16:33:36 -0600880
John Zulauf0223f142022-07-06 09:05:39 -0600881 HazardResult DetectFirstUseHazard(QueueId queue_id, const ResourceUsageRange &tag_range,
882 const AccessContext &access_context) const;
John Zulaufae842002021-04-15 18:20:55 -0600883
John Zulaufe5da6e52020-03-18 15:32:18 -0600884 const TrackBack &GetDstExternalTrackBack() const { return dst_external_; }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600885 void Reset() {
John Zulauf3d84f1b2020-03-09 13:33:25 -0600886 prev_.clear();
John Zulauf355e49b2020-04-24 15:11:15 -0600887 prev_by_subpass_.clear();
John Zulauf3d84f1b2020-03-09 13:33:25 -0600888 async_.clear();
John Zulauf22aefed2021-03-11 18:14:35 -0700889 src_external_ = nullptr;
John Zulaufa0a98292020-09-18 09:30:10 -0600890 dst_external_ = TrackBack();
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -0700891 start_tag_ = ResourceUsageTag();
John Zulauf16adfc92020-04-08 10:28:33 -0600892 for (auto &map : access_state_maps_) {
893 map.clear();
894 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600895 }
John Zulaufb02c1eb2020-10-06 16:33:36 -0600896
897 // Follow the previous contexts to access the access state, supporting "lazy" import into this context. Not intended for
898 // subpass layout transitions, as their pending state handling is more complex.
John Zulauf5f13a792020-03-10 07:31:21 -0600899 // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead
900 // Would need to add a "hint" overload to parallel_iterator::invalidate_[AB] call, if so.
John Zulauf22aefed2021-03-11 18:14:35 -0700901 template <typename BarrierAction>
902 void ResolvePreviousAccessStack(AccessAddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
903 const ResourceAccessState *infill_state, const BarrierAction &previous_barrier) const;
John Zulauf43cc7462020-12-03 12:33:12 -0700904 void ResolvePreviousAccess(AccessAddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
John Zulauf22aefed2021-03-11 18:14:35 -0700905 const ResourceAccessState *infill_state,
906 const ResourceAccessStateFunction *previous_barrier = nullptr) const;
John Zulauf4a6105a2020-11-17 15:11:05 -0700907 void ResolvePreviousAccesses();
John Zulaufb02c1eb2020-10-06 16:33:36 -0600908 template <typename BarrierAction>
John Zulaufd0ec59f2021-03-13 14:25:08 -0700909 void ResolveAccessRange(const AttachmentViewGen &view_gen, AttachmentViewGen::Gen gen_type, BarrierAction &barrier_action,
910 ResourceAccessRangeMap *descent_map, const ResourceAccessState *infill_state) const;
John Zulaufb02c1eb2020-10-06 16:33:36 -0600911 template <typename BarrierAction>
John Zulauf43cc7462020-12-03 12:33:12 -0700912 void ResolveAccessRange(AccessAddressType type, const ResourceAccessRange &range, BarrierAction &barrier_action,
John Zulauf355e49b2020-04-24 15:11:15 -0600913 ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
914 bool recur_to_infill = true) const;
John Zulauf1d5f9c12022-05-13 14:51:08 -0600915 template <typename ResolveOp>
916 void ResolveFromContext(ResolveOp &&resolve_op, const AccessContext &from_context,
917 const ResourceAccessState *infill_state = nullptr, bool recur_to_infill = false);
John Zulaufb02c1eb2020-10-06 16:33:36 -0600918
John Zulauf8e3c3e92021-01-06 11:19:36 -0700919 void UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
John Zulauf14940722021-04-12 15:19:02 -0600920 const ResourceAccessRange &range, ResourceUsageTag tag);
John Zulauf8e3c3e92021-01-06 11:19:36 -0700921 void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
John Zulauf110413c2021-03-20 05:38:38 -0600922 const VkImageSubresourceRange &subresource_range, const ResourceUsageTag &tag);
923 void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
John Zulauf355e49b2020-04-24 15:11:15 -0600924 const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset, const VkExtent3D &extent,
John Zulauf14940722021-04-12 15:19:02 -0600925 ResourceUsageTag tag);
John Zulaufd0ec59f2021-03-13 14:25:08 -0700926 void UpdateAccessState(const AttachmentViewGen &view_gen, AttachmentViewGen::Gen gen_type, SyncStageAccessIndex current_usage,
John Zulauf14940722021-04-12 15:19:02 -0600927 SyncOrdering ordering_rule, ResourceUsageTag tag);
John Zulauf8e3c3e92021-01-06 11:19:36 -0700928 void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
John Zulauf3d84f1b2020-03-09 13:33:25 -0600929 const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
John Zulauf14940722021-04-12 15:19:02 -0600930 ResourceUsageTag tag);
John Zulaufd0ec59f2021-03-13 14:25:08 -0700931 void UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const AttachmentViewGenVector &attachment_views,
John Zulauf14940722021-04-12 15:19:02 -0600932 uint32_t subpass, ResourceUsageTag tag);
John Zulaufd0ec59f2021-03-13 14:25:08 -0700933 void UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const AttachmentViewGenVector &attachment_views,
John Zulauf14940722021-04-12 15:19:02 -0600934 uint32_t subpass, ResourceUsageTag tag);
John Zulauf3d84f1b2020-03-09 13:33:25 -0600935
John Zulauf540266b2020-04-06 18:54:53 -0600936 void ResolveChildContexts(const std::vector<AccessContext> &contexts);
John Zulauf3d84f1b2020-03-09 13:33:25 -0600937
John Zulauf4fa68462021-04-26 21:04:22 -0600938 void ImportAsyncContexts(const AccessContext &from);
John Zulaufd0ec59f2021-03-13 14:25:08 -0700939 template <typename Action, typename RangeGen>
940 void ApplyUpdateAction(AccessAddressType address_type, const Action &action, RangeGen *range_gen_arg);
John Zulauf540266b2020-04-06 18:54:53 -0600941 template <typename Action>
John Zulaufd0ec59f2021-03-13 14:25:08 -0700942 void ApplyUpdateAction(const AttachmentViewGen &view_gen, AttachmentViewGen::Gen gen_type, const Action &action);
John Zulauf540266b2020-04-06 18:54:53 -0600943 template <typename Action>
John Zulaufd5115702021-01-18 12:34:33 -0700944 void ApplyToContext(const Action &barrier_action);
John Zulauf43cc7462020-12-03 12:33:12 -0700945 static AccessAddressType ImageAddressType(const IMAGE_STATE &image);
John Zulauf16adfc92020-04-08 10:28:33 -0600946
John Zulauf1d5f9c12022-05-13 14:51:08 -0600947 void DeleteAccess(const AddressRange &address);
John Zulauf540266b2020-04-06 18:54:53 -0600948 AccessContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
John Zulauf1a224292020-06-30 14:52:13 -0600949 const std::vector<AccessContext> &contexts, const AccessContext *external_context);
John Zulauf540266b2020-04-06 18:54:53 -0600950
951 AccessContext() { Reset(); }
John Zulauf7635de32020-05-29 17:14:15 -0600952 AccessContext(const AccessContext &copy_from) = default;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600953
John Zulauf43cc7462020-12-03 12:33:12 -0700954 ResourceAccessRangeMap &GetAccessStateMap(AccessAddressType type) { return access_state_maps_[static_cast<size_t>(type)]; }
955 const ResourceAccessRangeMap &GetAccessStateMap(AccessAddressType type) const {
956 return access_state_maps_[static_cast<size_t>(type)];
957 }
John Zulauf355e49b2020-04-24 15:11:15 -0600958 const TrackBack *GetTrackBackFromSubpass(uint32_t subpass) const {
959 if (subpass == VK_SUBPASS_EXTERNAL) {
John Zulauf22aefed2021-03-11 18:14:35 -0700960 return src_external_;
John Zulauf355e49b2020-04-24 15:11:15 -0600961 } else {
962 assert(subpass < prev_by_subpass_.size());
963 return prev_by_subpass_[subpass];
964 }
965 }
John Zulauf16adfc92020-04-08 10:28:33 -0600966
John Zulauf64ffe552021-02-06 10:25:07 -0700967 bool ValidateLayoutTransitions(const CommandExecutionContext &ex_context, const RENDER_PASS_STATE &rp_state,
John Zulaufd0ec59f2021-03-13 14:25:08 -0700968 const VkRect2D &render_area, uint32_t subpass, const AttachmentViewGenVector &attachment_views,
sjfricke0bea06e2022-06-05 09:22:26 +0900969 CMD_TYPE cmd_type) const;
John Zulauf64ffe552021-02-06 10:25:07 -0700970 bool ValidateLoadOperation(const CommandExecutionContext &ex_context, const RENDER_PASS_STATE &rp_state,
John Zulaufd0ec59f2021-03-13 14:25:08 -0700971 const VkRect2D &render_area, uint32_t subpass, const AttachmentViewGenVector &attachment_views,
sjfricke0bea06e2022-06-05 09:22:26 +0900972 CMD_TYPE cmd_type) const;
973 bool ValidateStoreOperation(const CommandExecutionContext &ex_context, const RENDER_PASS_STATE &rp_state,
John Zulaufd0ec59f2021-03-13 14:25:08 -0700974 const VkRect2D &render_area, uint32_t subpass, const AttachmentViewGenVector &attachment_views,
sjfricke0bea06e2022-06-05 09:22:26 +0900975 CMD_TYPE cmd_type) const;
John Zulauf64ffe552021-02-06 10:25:07 -0700976 bool ValidateResolveOperations(const CommandExecutionContext &ex_context, const RENDER_PASS_STATE &rp_state,
sjfricke0bea06e2022-06-05 09:22:26 +0900977 const VkRect2D &render_area, const AttachmentViewGenVector &attachment_views, CMD_TYPE cmd_type,
978 uint32_t subpass) const;
John Zulauf1507ee42020-05-18 11:33:09 -0600979
John Zulauf14940722021-04-12 15:19:02 -0600980 void SetStartTag(ResourceUsageTag tag) { start_tag_ = tag; }
John Zulauf4a6105a2020-11-17 15:11:05 -0700981 template <typename Action>
982 void ForAll(Action &&action);
John Zulauff26fca92022-08-15 11:53:34 -0600983 template <typename Action>
984 void ConstForAll(Action &&action) const;
John Zulauf3da08bb2022-08-01 17:56:56 -0600985 template <typename Predicate>
986 void EraseIf(Predicate &&pred);
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -0700987
John Zulaufecf4ac52022-06-06 10:08:42 -0600988 // For use during queue submit, while building up the QueueBatchContext AccessContext for validation; otherwise left clear.
John Zulauf06f6f1e2022-04-19 15:28:11 -0600989 void AddAsyncContext(const AccessContext *context);
990 // For use during queue submit to avoid stale pointers.
John Zulauf06f6f1e2022-04-19 15:28:11 -0600991 void ClearAsyncContext(const AccessContext *context) { async_.clear(); }
992
John Zulauf3d84f1b2020-03-09 13:33:25 -0600993 private:
994 template <typename Detector>
John Zulaufe0757ba2022-06-10 16:51:45 -0600995 HazardResult DetectHazard(AccessAddressType type, Detector &detector, const ResourceAccessRange &range,
John Zulauf355e49b2020-04-24 15:11:15 -0600996 DetectOptions options) const;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600997 template <typename Detector>
John Zulauf43cc7462020-12-03 12:33:12 -0700998 HazardResult DetectAsyncHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range) const;
John Zulauf5f13a792020-03-10 07:31:21 -0600999 template <typename Detector>
John Zulaufe0757ba2022-06-10 16:51:45 -06001000 HazardResult DetectPreviousHazard(AccessAddressType type, Detector &detector, const ResourceAccessRange &range) const;
John Zulauf8e3c3e92021-01-06 11:19:36 -07001001 void UpdateAccessState(AccessAddressType type, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
John Zulauf14940722021-04-12 15:19:02 -06001002 const ResourceAccessRange &range, ResourceUsageTag tag);
John Zulaufb02c1eb2020-10-06 16:33:36 -06001003
1004 MapArray access_state_maps_;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001005 std::vector<TrackBack> prev_;
John Zulauf355e49b2020-04-24 15:11:15 -06001006 std::vector<TrackBack *> prev_by_subpass_;
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -07001007 std::vector<const AccessContext *> async_;
John Zulauf22aefed2021-03-11 18:14:35 -07001008 TrackBack *src_external_;
John Zulaufe5da6e52020-03-18 15:32:18 -06001009 TrackBack dst_external_;
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -07001010 ResourceUsageTag start_tag_;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001011};
1012
John Zulaufe0757ba2022-06-10 16:51:45 -06001013struct SyncEventState {
1014 enum IgnoreReason { NotIgnored = 0, ResetWaitRace, Reset2WaitRace, SetRace, MissingStageBits, SetVsWait2, MissingSetEvent };
1015 using EventPointer = std::shared_ptr<const EVENT_STATE>;
1016 using ScopeMap = ResourceAccessRangeMap;
1017 EventPointer event;
1018 CMD_TYPE last_command; // Only Event commands are valid here.
1019 ResourceUsageTag last_command_tag; // Needed to filter replay validation
1020 CMD_TYPE unsynchronized_set;
1021 VkPipelineStageFlags2KHR barriers;
1022 SyncExecScope scope;
1023 ResourceUsageTag first_scope_tag;
1024 bool destroyed;
1025 std::shared_ptr<const AccessContext> first_scope;
1026
1027 SyncEventState()
1028 : event(),
1029 last_command(CMD_NONE),
1030 last_command_tag(0),
1031 unsynchronized_set(CMD_NONE),
1032 barriers(0U),
1033 scope(),
1034 first_scope_tag(),
1035 destroyed(true) {}
1036
1037 SyncEventState(const SyncEventState &) = default;
1038 SyncEventState(SyncEventState &&) = default;
1039
1040 SyncEventState(const SyncEventState::EventPointer &event_state) : SyncEventState() {
1041 event = event_state;
1042 destroyed = (event.get() == nullptr) || event_state->Destroyed();
1043 }
1044
1045 void ResetFirstScope();
1046 const ScopeMap &FirstScope(AccessAddressType address_type) const { return first_scope->GetAccessStateMap(address_type); }
1047 IgnoreReason IsIgnoredByWait(CMD_TYPE cmd_type, VkPipelineStageFlags2KHR srcStageMask) const;
1048 bool HasBarrier(VkPipelineStageFlags2KHR stageMask, VkPipelineStageFlags2KHR exec_scope) const;
1049};
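// Illustrative sketch (an assumption, not part of this header): when a wait operation consumes this event state,
// IsIgnoredByWait() is expected to gate whether the event's first scope participates; `sync_event` and
// `srcStageMask` are hypothetical locals of such a wait handler:
//
//   SyncEventState::IgnoreReason reason = sync_event->IsIgnoredByWait(cmd_type, srcStageMask);
//   if (reason == SyncEventState::NotIgnored) {
//       // apply the accesses captured in sync_event->first_scope against the wait's scopes
//   } else {
//       // skip the event (e.g. ResetWaitRace, SetRace, MissingStageBits, ...) and report as appropriate
//   }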
1050
1051class SyncEventsContext {
1052 public:
1053 using Map = layer_data::unordered_map<const EVENT_STATE *, std::shared_ptr<SyncEventState>>;
1054 using iterator = Map::iterator;
1055 using const_iterator = Map::const_iterator;
1056
1057 SyncEventState *GetFromShared(const SyncEventState::EventPointer &event_state) {
1058 const auto find_it = map_.find(event_state.get());
1059 if (find_it == map_.end()) {
1060 if (!event_state.get()) return nullptr;
1061
1062 const auto *event_plain_ptr = event_state.get();
1063 auto sync_state = std::make_shared<SyncEventState>(event_state);
1064 auto insert_pair = map_.emplace(event_plain_ptr, sync_state);
1065 return insert_pair.first->second.get();
1066 }
1067 return find_it->second.get();
1068 }
1069
1070 const SyncEventState *Get(const EVENT_STATE *event_state) const {
1071 const auto find_it = map_.find(event_state);
1072 if (find_it == map_.end()) {
1073 return nullptr;
1074 }
1075 return find_it->second.get();
1076 }
1077 const SyncEventState *Get(const SyncEventState::EventPointer &event_state) const { return Get(event_state.get()); }
1078
1079 void ApplyBarrier(const SyncExecScope &src, const SyncExecScope &dst, ResourceUsageTag tag);
1080 void ApplyTaggedWait(VkQueueFlags queue_flags, ResourceUsageTag tag);
1081
1082 // stl style naming for range-for support
1083 inline iterator begin() { return map_.begin(); }
1084 inline const_iterator begin() const { return map_.begin(); }
1085 inline iterator end() { return map_.end(); }
1086 inline const_iterator end() const { return map_.end(); }
1087
1088 void Destroy(const EVENT_STATE *event_state) {
1089 auto sync_it = map_.find(event_state);
1090 if (sync_it != map_.end()) {
1091 sync_it->second->destroyed = true;
1092 map_.erase(sync_it);
1093 }
1094 }
1095 void Clear() { map_.clear(); }
1096
1097 SyncEventsContext &DeepCopy(const SyncEventsContext &from);
1098
1099 private:
1100 Map map_;
1101};
1102
John Zulauf355e49b2020-04-24 15:11:15 -06001103class RenderPassAccessContext {
1104 public:
John Zulaufd0ec59f2021-03-13 14:25:08 -07001105 static AttachmentViewGenVector CreateAttachmentViewGen(const VkRect2D &render_area,
1106 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views);
John Zulauf64ffe552021-02-06 10:25:07 -07001107 RenderPassAccessContext() : rp_state_(nullptr), render_area_(VkRect2D()), current_subpass_(0) {}
1108 RenderPassAccessContext(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area, VkQueueFlags queue_flags,
1109 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const AccessContext *external_context);
John Zulauf355e49b2020-04-24 15:11:15 -06001110
sjfricke0bea06e2022-06-05 09:22:26 +09001111 bool ValidateDrawSubpassAttachment(const CommandExecutionContext &ex_context, const CMD_BUFFER_STATE &cmd_buffer,
1112 CMD_TYPE cmd_type) const;
1113 void RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd_buffer, ResourceUsageTag tag);
1114 bool ValidateNextSubpass(const CommandExecutionContext &ex_context, CMD_TYPE cmd_type) const;
1115 bool ValidateEndRenderPass(const CommandExecutionContext &ex_context, CMD_TYPE cmd_type) const;
1116 bool ValidateFinalSubpassLayoutTransitions(const CommandExecutionContext &ex_context, CMD_TYPE cmd_type) const;
John Zulauf355e49b2020-04-24 15:11:15 -06001117
John Zulauf14940722021-04-12 15:19:02 -06001118 void RecordLayoutTransitions(ResourceUsageTag tag);
1119 void RecordLoadOperations(ResourceUsageTag tag);
John Zulauf41a9c7c2021-12-07 15:59:53 -07001120 void RecordBeginRenderPass(ResourceUsageTag tag, ResourceUsageTag load_tag);
1121 void RecordNextSubpass(ResourceUsageTag store_tag, ResourceUsageTag barrier_tag, ResourceUsageTag load_tag);
1122 void RecordEndRenderPass(AccessContext *external_context, ResourceUsageTag store_tag, ResourceUsageTag barrier_tag);
John Zulauf355e49b2020-04-24 15:11:15 -06001123
John Zulauf540266b2020-04-06 18:54:53 -06001124 AccessContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
1125 const AccessContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
John Zulauf355e49b2020-04-24 15:11:15 -06001126 const std::vector<AccessContext> &GetContexts() const { return subpass_contexts_; }
1127 uint32_t GetCurrentSubpass() const { return current_subpass_; }
1128 const RENDER_PASS_STATE *GetRenderPassState() const { return rp_state_; }
John Zulauf64ffe552021-02-06 10:25:07 -07001129 AccessContext *CreateStoreResolveProxy() const;
John Zulauf355e49b2020-04-24 15:11:15 -06001130
1131 private:
John Zulauf355e49b2020-04-24 15:11:15 -06001132 const RENDER_PASS_STATE *rp_state_;
John Zulauf64ffe552021-02-06 10:25:07 -07001133 const VkRect2D render_area_;
John Zulauf355e49b2020-04-24 15:11:15 -06001134 uint32_t current_subpass_;
1135 std::vector<AccessContext> subpass_contexts_;
John Zulaufd0ec59f2021-03-13 14:25:08 -07001136 AttachmentViewGenVector attachment_views_;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001137};
1138
John Zulauf64ffe552021-02-06 10:25:07 -07001139// Command execution context is the base class for command buffer and queue contexts
1140 // It prevents unintended leakage of subclass-specific state, while storing enough information
1141 // for message logging.
1142 // TODO: determine where to draw the design split for tag tracking (is there anything common to Queues and CB's?)
1143class CommandExecutionContext {
John Zulauf3d84f1b2020-03-09 13:33:25 -06001144 public:
John Zulauf3c788ef2022-02-22 12:12:30 -07001145 using AccessLog = std::vector<ResourceUsageRecord>;
John Zulauf64ffe552021-02-06 10:25:07 -07001146 CommandExecutionContext() : sync_state_(nullptr) {}
John Zulauf3c788ef2022-02-22 12:12:30 -07001147 CommandExecutionContext(const SyncValidator *sync_validator) : sync_state_(sync_validator) {}
John Zulauf64ffe552021-02-06 10:25:07 -07001148 virtual ~CommandExecutionContext() = default;
John Zulaufbb890452021-12-14 11:30:18 -07001149 virtual AccessContext *GetCurrentAccessContext() = 0;
1150 virtual SyncEventsContext *GetCurrentEventsContext() = 0;
1151 virtual const AccessContext *GetCurrentAccessContext() const = 0;
1152 virtual const SyncEventsContext *GetCurrentEventsContext() const = 0;
John Zulauf00119522022-05-23 19:07:42 -06001153 virtual QueueId GetQueueId() const = 0;
John Zulaufbb890452021-12-14 11:30:18 -07001154
John Zulauf64ffe552021-02-06 10:25:07 -07001155 const SyncValidator &GetSyncState() const {
1156 assert(sync_state_);
1157 return *sync_state_;
1158 }
John Zulauf3c788ef2022-02-22 12:12:30 -07001159
John Zulauf3c788ef2022-02-22 12:12:30 -07001160 ResourceUsageRange ImportRecordedAccessLog(const CommandBufferAccessContext &recorded_context);
John Zulauf06f6f1e2022-04-19 15:28:11 -06001161 std::string FormatHazard(const HazardResult &hazard) const;
1162
John Zulaufbb890452021-12-14 11:30:18 -07001163 virtual ResourceUsageTag GetTagLimit() const = 0;
1164 virtual VulkanTypedHandle Handle() const = 0;
John Zulauf4fa68462021-04-26 21:04:22 -06001165 virtual std::string FormatUsage(ResourceUsageTag tag) const = 0;
John Zulauf3c788ef2022-02-22 12:12:30 -07001166 virtual void InsertRecordedAccessLogEntries(const CommandBufferAccessContext &cb_context) = 0;
John Zulauf64ffe552021-02-06 10:25:07 -07001167
John Zulaufdab327f2022-07-08 12:02:05 -06001168 virtual HazardResult DetectFirstUseHazard(const ResourceUsageRange &tag_range) = 0;
1169 virtual void BeginRenderPassReplay(const SyncOpBeginRenderPass &begin_op, ResourceUsageTag tag) {
1170 assert("Must override if use by derived type is valid" == nullptr);
1171 }
1172 virtual void NextSubpassReplay() { assert("Must override if use by derived type is valid" == nullptr); }
1173 virtual void EndRenderPassReplay() { assert("Must override if use by derived type is valid" == nullptr); }
1174
John Zulauf0223f142022-07-06 09:05:39 -06001175 bool ValidForSyncOps() const;
1176
John Zulauf64ffe552021-02-06 10:25:07 -07001177 protected:
John Zulaufdab327f2022-07-08 12:02:05 -06001178 class ReplayGuard {
1179 public:
1180 ReplayGuard(CommandExecutionContext &exec_context, const CommandBufferAccessContext &recorded_context)
1181 : exec_context_(exec_context) {
1182 exec_context_.BeginCommandBufferReplay(recorded_context);
1183 }
1184 ~ReplayGuard() { exec_context_.EndCommandBufferReplay(); }
1185
1186 private:
1187 CommandExecutionContext &exec_context_;
1188 };
1189 friend ReplayGuard;
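    // Illustrative sketch (an assumption): replay of a recorded command buffer is expected to be scoped with
    // ReplayGuard so current_replay_ is valid for the duration of the replayed sync ops:
    //
    //   {
    //       ReplayGuard guard(exec_context, recorded_cb_context);  // calls BeginCommandBufferReplay
    //       // ... validate/record the recorded sync ops against exec_context ...
    //   }  // ~ReplayGuard calls EndCommandBufferReplay, clearing current_replay_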
1190
John Zulauf3c788ef2022-02-22 12:12:30 -07001191 const SyncValidator *sync_state_;
John Zulaufdab327f2022-07-08 12:02:05 -06001192 const CommandBufferAccessContext *current_replay_;
1193
1194 private:
1195 // Only allow the replay guard to manage the begin/end
1196 void BeginCommandBufferReplay(const CommandBufferAccessContext &recorded) { current_replay_ = &recorded; }
1197 void EndCommandBufferReplay() { current_replay_ = nullptr; }
John Zulauf64ffe552021-02-06 10:25:07 -07001198};
1199
1200class CommandBufferAccessContext : public CommandExecutionContext {
1201 public:
John Zulauf8eda1562021-04-13 17:06:41 -06001202 using SyncOpPointer = std::shared_ptr<SyncOpBase>;
1203 struct SyncOpEntry {
1204 ResourceUsageTag tag;
1205 SyncOpPointer sync_op;
1206 SyncOpEntry(ResourceUsageTag tag_, SyncOpPointer &&sync_op_) : tag(tag_), sync_op(std::move(sync_op_)) {}
1207 SyncOpEntry() = default;
1208 SyncOpEntry(const SyncOpEntry &other) = default;
1209 };
1210
John Zulauf3c788ef2022-02-22 12:12:30 -07001211 CommandBufferAccessContext(const SyncValidator *sync_validator = nullptr)
John Zulauf64ffe552021-02-06 10:25:07 -07001212 : CommandExecutionContext(sync_validator),
John Zulauf4fa68462021-04-26 21:04:22 -06001213 cb_state_(),
1214 queue_flags_(),
1215 destroyed_(false),
John Zulauf14940722021-04-12 15:19:02 -06001216 access_log_(),
John Zulauf3c2a0b32021-07-14 11:14:52 -06001217 cbs_referenced_(),
John Zulauffaea0ee2021-01-14 14:01:32 -07001218 command_number_(0),
1219 subcommand_number_(0),
John Zulauf355e49b2020-04-24 15:11:15 -06001220 reset_count_(0),
John Zulauf355e49b2020-04-24 15:11:15 -06001221 cb_access_context_(),
1222 current_context_(&cb_access_context_),
John Zulauf669dfd52021-01-27 17:15:28 -07001223 events_context_(),
John Zulauf4fa68462021-04-26 21:04:22 -06001224 render_pass_contexts_(),
1225 current_renderpass_context_(),
1226 sync_ops_() {}
John Zulauf355e49b2020-04-24 15:11:15 -06001227 CommandBufferAccessContext(SyncValidator &sync_validator, std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
John Zulauf64ffe552021-02-06 10:25:07 -07001228 : CommandBufferAccessContext(&sync_validator) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06001229 cb_state_ = cb_state;
1230 queue_flags_ = queue_flags;
1231 }
John Zulauf4fa68462021-04-26 21:04:22 -06001232
1233 struct AsProxyContext {};
1234 CommandBufferAccessContext(const CommandBufferAccessContext &real_context, AsProxyContext dummy);
1235
John Zulauf64ffe552021-02-06 10:25:07 -07001236 ~CommandBufferAccessContext() override = default;
1237 CommandExecutionContext &GetExecutionContext() { return *this; }
1238 const CommandExecutionContext &GetExecutionContext() const { return *this; }
John Zulauf5c5e88d2019-12-26 11:22:02 -07001239
1240 void Reset() {
John Zulauf14940722021-04-12 15:19:02 -06001241 access_log_.clear();
John Zulauf3c2a0b32021-07-14 11:14:52 -06001242 cbs_referenced_.clear();
John Zulauf8eda1562021-04-13 17:06:41 -06001243 sync_ops_.clear();
John Zulauf355e49b2020-04-24 15:11:15 -06001244 command_number_ = 0;
John Zulauffaea0ee2021-01-14 14:01:32 -07001245 subcommand_number_ = 0;
John Zulauf355e49b2020-04-24 15:11:15 -06001246 reset_count_++;
1247 cb_access_context_.Reset();
John Zulauf3d84f1b2020-03-09 13:33:25 -06001248 render_pass_contexts_.clear();
John Zulauf355e49b2020-04-24 15:11:15 -06001249 current_context_ = &cb_access_context_;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001250 current_renderpass_context_ = nullptr;
John Zulauf669dfd52021-01-27 17:15:28 -07001251 events_context_.Clear();
John Zulauf5c5e88d2019-12-26 11:22:02 -07001252 }
John Zulaufe7f6a5e2021-01-16 14:31:18 -07001253 void MarkDestroyed() { destroyed_ = true; }
1254 bool IsDestroyed() const { return destroyed_; }
John Zulauf5c5e88d2019-12-26 11:22:02 -07001255
John Zulauf4fa68462021-04-26 21:04:22 -06001256 std::string FormatUsage(ResourceUsageTag tag) const override;
John Zulauf06f6f1e2022-04-19 15:28:11 -06001257 std::string FormatUsage(const ResourceFirstAccess &access) const; // Only command buffers have "first usage"
John Zulaufbb890452021-12-14 11:30:18 -07001258 AccessContext *GetCurrentAccessContext() override { return current_context_; }
1259 SyncEventsContext *GetCurrentEventsContext() override { return &events_context_; }
1260 const AccessContext *GetCurrentAccessContext() const override { return current_context_; }
1261 const SyncEventsContext *GetCurrentEventsContext() const override { return &events_context_; }
John Zulauf00119522022-05-23 19:07:42 -06001262 QueueId GetQueueId() const override;
John Zulaufbb890452021-12-14 11:30:18 -07001263
John Zulauf64ffe552021-02-06 10:25:07 -07001264 RenderPassAccessContext *GetCurrentRenderPassContext() { return current_renderpass_context_; }
John Zulauf64ffe552021-02-06 10:25:07 -07001265 const RenderPassAccessContext *GetCurrentRenderPassContext() const { return current_renderpass_context_; }
sjfricke0bea06e2022-06-05 09:22:26 +09001266 ResourceUsageTag RecordBeginRenderPass(CMD_TYPE cmd_type, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
John Zulauf41a9c7c2021-12-07 15:59:53 -07001267 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views);
John Zulaufd5115702021-01-18 12:34:33 -07001268
sjfricke0bea06e2022-06-05 09:22:26 +09001269 bool ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, CMD_TYPE cmd_type) const;
John Zulauf14940722021-04-12 15:19:02 -06001270 void RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, ResourceUsageTag tag);
sjfricke0bea06e2022-06-05 09:22:26 +09001271 bool ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, CMD_TYPE cmd_type) const;
John Zulauf14940722021-04-12 15:19:02 -06001272 void RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, ResourceUsageTag tag);
sjfricke0bea06e2022-06-05 09:22:26 +09001273 bool ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, CMD_TYPE cmd_type) const;
John Zulauf14940722021-04-12 15:19:02 -06001274 void RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, ResourceUsageTag tag);
sjfricke0bea06e2022-06-05 09:22:26 +09001275 bool ValidateDrawSubpassAttachment(CMD_TYPE cmd_type) const;
John Zulauf14940722021-04-12 15:19:02 -06001276 void RecordDrawSubpassAttachment(ResourceUsageTag tag);
sjfricke0bea06e2022-06-05 09:22:26 +09001277 ResourceUsageTag RecordNextSubpass(CMD_TYPE cmd_type);
1278 ResourceUsageTag RecordEndRenderPass(CMD_TYPE cmd_type);
John Zulauf4a6105a2020-11-17 15:11:05 -07001279 void RecordDestroyEvent(VkEvent event);
John Zulauf49beb112020-11-04 16:06:31 -07001280
John Zulauf0223f142022-07-06 09:05:39 -06001281 bool ValidateFirstUse(CommandExecutionContext &exec_context, const char *func_name, uint32_t index) const;
sjfricke0bea06e2022-06-05 09:22:26 +09001282 void RecordExecutedCommandBuffer(const CommandBufferAccessContext &recorded_context);
John Zulauf1d5f9c12022-05-13 14:51:08 -06001283 void ResolveExecutedCommandBuffer(const AccessContext &recorded_context, ResourceUsageTag offset);
John Zulauf4fa68462021-04-26 21:04:22 -06001284
John Zulaufdab327f2022-07-08 12:02:05 -06001285 HazardResult DetectFirstUseHazard(const ResourceUsageRange &tag_range) override;
1286
John Zulauf3d84f1b2020-03-09 13:33:25 -06001287 const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
1288 VkQueueFlags GetQueueFlags() const { return queue_flags_; }
John Zulauffaea0ee2021-01-14 14:01:32 -07001289
John Zulauf41a9c7c2021-12-07 15:59:53 -07001290 ResourceUsageTag NextSubcommandTag(CMD_TYPE command, ResourceUsageRecord::SubcommandType subcommand);
John Zulaufbb890452021-12-14 11:30:18 -07001291 ResourceUsageTag GetTagLimit() const override { return access_log_.size(); }
1292 VulkanTypedHandle Handle() const override {
1293 if (cb_state_) {
1294 return cb_state_->Handle();
1295 }
1296 return VulkanTypedHandle(static_cast<VkCommandBuffer>(VK_NULL_HANDLE), kVulkanObjectTypeCommandBuffer);
1297 }
John Zulauffaea0ee2021-01-14 14:01:32 -07001298
John Zulauf41a9c7c2021-12-07 15:59:53 -07001299 ResourceUsageTag NextCommandTag(CMD_TYPE command,
1300 ResourceUsageRecord::SubcommandType subcommand = ResourceUsageRecord::SubcommandType::kNone);
1301 ResourceUsageTag NextIndexedCommandTag(CMD_TYPE command, uint32_t index);
John Zulauf3d84f1b2020-03-09 13:33:25 -06001302
John Zulauf3c788ef2022-02-22 12:12:30 -07001303 std::shared_ptr<const CMD_BUFFER_STATE> GetCBStateShared() const { return cb_state_; }
1304
John Zulauffaea0ee2021-01-14 14:01:32 -07001305 const CMD_BUFFER_STATE &GetCBState() const {
1306 assert(cb_state_);
1307 return *(cb_state_.get());
1308 }
1309 CMD_BUFFER_STATE &GetCBState() {
1310 assert(cb_state_);
1311 return *(cb_state_.get());
1312 }
John Zulauffaea0ee2021-01-14 14:01:32 -07001313
John Zulauf1bf30522021-09-03 15:39:06 -06001314 template <class T, class... Args>
1315 void RecordSyncOp(Args &&...args) {
1316 // T must be derived from SyncOpBase or the compiler will flag the next line as an error.
1317 SyncOpPointer sync_op(std::make_shared<T>(std::forward<Args>(args)...));
John Zulaufbb890452021-12-14 11:30:18 -07001318 RecordSyncOp(std::move(sync_op)); // Call the non-template version
John Zulauf1bf30522021-09-03 15:39:06 -06001319 }
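    // Illustrative sketch (an assumption): a typical call site constructs the sync op in place, e.g. a
    // CmdBeginRenderPass hook might record (arguments abbreviated, names hypothetical):
    //
    //   cb_access_context->RecordSyncOp<SyncOpBeginRenderPass>(cmd_type, sync_state, pRenderPassBegin, pSubpassBeginInfo);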
John Zulauf3c788ef2022-02-22 12:12:30 -07001320 const AccessLog &GetAccessLog() const { return access_log_; }
1321 void InsertRecordedAccessLogEntries(const CommandBufferAccessContext &cb_context) override;
John Zulauf06f6f1e2022-04-19 15:28:11 -06001322 const std::vector<SyncOpEntry> &GetSyncOps() const { return sync_ops_; };
John Zulauf8eda1562021-04-13 17:06:41 -06001323
John Zulauf3d84f1b2020-03-09 13:33:25 -06001324 private:
John Zulaufbb890452021-12-14 11:30:18 -07001325 // As this is passing around a shared pointer to record, move to avoid needless atomics.
1326 void RecordSyncOp(SyncOpPointer &&sync_op);
John Zulauf4fa68462021-04-26 21:04:22 -06001327 std::shared_ptr<CMD_BUFFER_STATE> cb_state_;
1328 VkQueueFlags queue_flags_;
1329 bool destroyed_;
1330
John Zulauf3c788ef2022-02-22 12:12:30 -07001331 AccessLog access_log_;
John Zulauf3c2a0b32021-07-14 11:14:52 -06001332 layer_data::unordered_set<std::shared_ptr<const CMD_BUFFER_STATE>> cbs_referenced_;
John Zulauf355e49b2020-04-24 15:11:15 -06001333 uint32_t command_number_;
John Zulauffaea0ee2021-01-14 14:01:32 -07001334 uint32_t subcommand_number_;
John Zulauf355e49b2020-04-24 15:11:15 -06001335 uint32_t reset_count_;
John Zulauf4fa68462021-04-26 21:04:22 -06001336
John Zulauf355e49b2020-04-24 15:11:15 -06001337 AccessContext cb_access_context_;
John Zulauf540266b2020-04-06 18:54:53 -06001338 AccessContext *current_context_;
John Zulauf669dfd52021-01-27 17:15:28 -07001339 SyncEventsContext events_context_;
John Zulauf4fa68462021-04-26 21:04:22 -06001340
1341 // Don't need the following for an active proxy cb context
John Zulaufab84f242022-08-04 18:38:40 -06001342 std::vector<std::unique_ptr<RenderPassAccessContext>> render_pass_contexts_;
John Zulauf4fa68462021-04-26 21:04:22 -06001343 RenderPassAccessContext *current_renderpass_context_;
1344 std::vector<SyncOpEntry> sync_ops_;
John Zulauf9cb530d2019-09-30 14:14:10 -06001345};
1346
John Zulauf697c0e12022-04-19 16:31:12 -06001347class QueueSyncState;
1348
1349 // Stores the ResourceUsageRecords for the global tag range. The prev_ field allows seamless,
1350 // const access during the Validation phase from the cmd state "overlay".
1351class AccessLogger {
1352 public:
1353 struct BatchRecord {
1354 BatchRecord() = default;
1355 BatchRecord(const BatchRecord &other) = default;
1356 BatchRecord(BatchRecord &&other) = default;
1357 BatchRecord(const QueueSyncState *q, uint64_t submit, uint32_t batch)
1358 : queue(q), submit_index(submit), batch_index(batch) {}
1359 BatchRecord &operator=(const BatchRecord &other) = default;
1360 const QueueSyncState *queue;
1361 uint64_t submit_index;
1362 uint32_t batch_index;
1363 };
1364
1365 struct AccessRecord {
1366 const BatchRecord *batch;
1367 const ResourceUsageRecord *record;
1368 bool IsValid() const { return batch && record; }
1369 };
1370
1371 // BatchLog lookup is batch relative, thus the batch doesn't need to track its offset
1372 class BatchLog {
1373 public:
1374 BatchLog() = default;
1375 BatchLog(const BatchLog &batch) = default;
1376 BatchLog(BatchLog &&other) = default;
1377 BatchLog &operator=(const BatchLog &other) = default;
1378 BatchLog &operator=(BatchLog &&other) = default;
1379 BatchLog(const BatchRecord &batch) : batch_(batch) {}
1380
1381 size_t Size() const { return log_.size(); }
1382 const BatchRecord &GetBatch() const { return batch_; }
1383 AccessRecord operator[](size_t index) const;
1384
1385 void Append(const CommandExecutionContext::AccessLog &other);
1386
1387 private:
1388 BatchRecord batch_;
1389 layer_data::unordered_set<std::shared_ptr<const CMD_BUFFER_STATE>> cbs_referenced_;
1390 CommandExecutionContext::AccessLog log_;
1391 };
1392
1393 using AccessLogRangeMap = sparse_container::range_map<ResourceUsageTag, BatchLog>;
1394
1395 AccessLogger(const AccessLogger *prev = nullptr) : prev_(prev) {}
1396 // AccessLogger lookup is based on global tags
1397 AccessRecord operator[](ResourceUsageTag tag) const;
1398 BatchLog *AddBatch(const QueueSyncState *queue_state, uint64_t submit_id, uint32_t batch_id, const ResourceUsageRange &range);
1399 void MergeMove(AccessLogger &&child);
1400 void Reset();
1401
1402 private:
1403 const AccessLogger *prev_;
1404 AccessLogRangeMap access_log_map_;
1405};
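// Illustrative sketch (an assumption): mapping a global tag from a submitted-batch hazard back to its usage record,
// e.g. while formatting an error message; `logger` and `hazard_tag` are hypothetical:
//
//   AccessLogger::AccessRecord access = logger[hazard_tag];
//   if (access.IsValid()) {
//       // access.batch identifies the queue/submit_index/batch_index, access.record the offending command
//   }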
1406
John Zulauf697c0e12022-04-19 16:31:12 -06001407class QueueBatchContext : public CommandExecutionContext {
1408 public:
John Zulaufdab327f2022-07-08 12:02:05 -06001409 struct RenderPassReplayState {
1410 // A minimal subset of the functionality present in RenderPassAccessContext. Since the accesses are recorded in the
1411 // first_use information of the recorded access contexts, all we need to support here are the barrier/resolve operations.
1412 RenderPassReplayState() { Reset(); }
1413 AccessContext *Begin(VkQueueFlags queue_flags, const SyncOpBeginRenderPass &begin_op_,
1414 const AccessContext &external_context);
1415 AccessContext *Next();
1416 void End(AccessContext &external_context);
1417
1418 const SyncOpBeginRenderPass *begin_op = nullptr;
1419 const AccessContext *replay_context = nullptr;
1420 uint32_t subpass = VK_SUBPASS_EXTERNAL;
1421 std::vector<AccessContext> subpass_contexts;
1422 void Reset() {
1423 begin_op = nullptr;
1424 replay_context = nullptr;
1425 subpass = VK_SUBPASS_EXTERNAL;
1426 subpass_contexts.clear();
1427 }
1428 operator bool() const { return begin_op != nullptr; }
1429 };
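    // Illustrative sketch (an assumption): during replay of a recorded render pass at submit time, the replay state
    // is expected to be driven roughly as follows (member names taken from below, sequencing is assumed):
    //
    //   current_access_context_ = rp_replay_.Begin(GetQueueFlags(), begin_op, access_context_);  // BeginRenderPassReplay
    //   current_access_context_ = rp_replay_.Next();                                             // NextSubpassReplay
    //   rp_replay_.End(access_context_);                                                         // EndRenderPassReplay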
1430
John Zulauf1d5f9c12022-05-13 14:51:08 -06001431 using ConstBatchSet = layer_data::unordered_set<std::shared_ptr<const QueueBatchContext>>;
1432 using BatchSet = layer_data::unordered_set<std::shared_ptr<QueueBatchContext>>;
1433 static constexpr bool TruePred(const std::shared_ptr<const QueueBatchContext> &) { return true; }
John Zulauf697c0e12022-04-19 16:31:12 -06001434 struct CmdBufferEntry {
1435 uint32_t index = 0;
1436 std::shared_ptr<const CommandBufferAccessContext> cb;
1437 CmdBufferEntry(uint32_t index_, std::shared_ptr<const CommandBufferAccessContext> &&cb_)
1438 : index(index_), cb(std::move(cb_)) {}
1439 };
John Zulauf1d5f9c12022-05-13 14:51:08 -06001440
John Zulauf697c0e12022-04-19 16:31:12 -06001441 using CommandBuffers = std::vector<CmdBufferEntry>;
1442
John Zulaufa8700a52022-08-18 16:22:08 -06001443 QueueBatchContext(const SyncValidator &sync_state, const QueueSyncState &queue_state);
1444 QueueBatchContext() = delete;
1445
John Zulauf697c0e12022-04-19 16:31:12 -06001446 std::string FormatUsage(ResourceUsageTag tag) const override;
John Zulaufdab327f2022-07-08 12:02:05 -06001447 AccessContext *GetCurrentAccessContext() override { return current_access_context_; }
1448 const AccessContext *GetCurrentAccessContext() const override { return current_access_context_; }
John Zulauf697c0e12022-04-19 16:31:12 -06001449 SyncEventsContext *GetCurrentEventsContext() override { return &events_context_; }
1450 const SyncEventsContext *GetCurrentEventsContext() const override { return &events_context_; }
1451 const QueueSyncState *GetQueueSyncState() const { return queue_state_; }
1452 VkQueueFlags GetQueueFlags() const;
John Zulauf00119522022-05-23 19:07:42 -06001453 QueueId GetQueueId() const override;
John Zulauf697c0e12022-04-19 16:31:12 -06001454
1455 void SetBatchLog(AccessLogger &logger, uint64_t submit_id, uint32_t batch_id);
1456 void ResetAccessLog() {
1457 logger_ = nullptr;
1458 batch_log_ = nullptr;
1459 }
John Zulaufe0757ba2022-06-10 16:51:45 -06001460 void ResetEventsContext() { events_context_.Clear(); }
John Zulauf697c0e12022-04-19 16:31:12 -06001461 ResourceUsageTag GetTagLimit() const override { return batch_log_->Size() + tag_range_.begin; }
1462 // begin is the tag bias / .size() is the number of total records that should eventually be in access_log_
1463 ResourceUsageRange GetTagRange() const { return tag_range_; }
1464 void InsertRecordedAccessLogEntries(const CommandBufferAccessContext &cb_context) override;
1465
1466 void SetTagBias(ResourceUsageTag);
John Zulaufa8700a52022-08-18 16:22:08 -06001467 void SetupAccessContext(const std::shared_ptr<const QueueBatchContext> &prev, const VkSubmitInfo2 &submit_info,
1468 SignaledSemaphores &signaled_semaphores);
1469 void SetupCommandBufferInfo(const VkSubmitInfo2 &submit_info);
John Zulauf697c0e12022-04-19 16:31:12 -06001470
John Zulaufa8700a52022-08-18 16:22:08 -06001471 bool DoQueueSubmitValidate(const SyncValidator &sync_state, QueueSubmitCmdState &cmd_state, const VkSubmitInfo2 &submit_info);
John Zulaufcb7e1672022-05-04 13:46:08 -06001472
John Zulauf1d5f9c12022-05-13 14:51:08 -06001473 void ResolveSubmittedCommandBuffer(const AccessContext &recorded_context, ResourceUsageTag offset);
1474
John Zulauf697c0e12022-04-19 16:31:12 -06001475 VulkanTypedHandle Handle() const override;
1476
John Zulauf1d5f9c12022-05-13 14:51:08 -06001477 void ApplyTaggedWait(QueueId queue_id, ResourceUsageTag tag);
1478 void ApplyDeviceWait();
1479
John Zulaufdab327f2022-07-08 12:02:05 -06001480 HazardResult DetectFirstUseHazard(const ResourceUsageRange &tag_range) override;
1481 void BeginRenderPassReplay(const SyncOpBeginRenderPass &begin_op, ResourceUsageTag tag) override;
1482 void NextSubpassReplay() override;
1483 void EndRenderPassReplay() override;
1484
John Zulauf697c0e12022-04-19 16:31:12 -06001485 private:
John Zulaufecf4ac52022-06-06 10:08:42 -06001486 std::shared_ptr<QueueBatchContext> ResolveOneWaitSemaphore(VkSemaphore sem, VkPipelineStageFlags2 wait_mask,
1487 SignaledSemaphores &signaled);
John Zulauf697c0e12022-04-19 16:31:12 -06001488
1489 const QueueSyncState *queue_state_ = nullptr;
1490 ResourceUsageRange tag_range_ = ResourceUsageRange(0, 0); // Range of tags referenced by cbs_referenced
1491
1492 AccessContext access_context_;
John Zulaufdab327f2022-07-08 12:02:05 -06001493 AccessContext *current_access_context_;
John Zulauf697c0e12022-04-19 16:31:12 -06001494 SyncEventsContext events_context_;
1495
1496 // Clear these after validation and import
1497 CommandBuffers command_buffers_;
John Zulauf1d5f9c12022-05-13 14:51:08 -06001498 ConstBatchSet async_batches_;
John Zulauf697c0e12022-04-19 16:31:12 -06001499 // When null use the global logger
1500 AccessLogger *logger_ = nullptr;
1501 AccessLogger::BatchLog *batch_log_ = nullptr;
John Zulaufdab327f2022-07-08 12:02:05 -06001502 RenderPassReplayState rp_replay_;
John Zulauf697c0e12022-04-19 16:31:12 -06001503};
John Zulaufbbda4572022-04-19 16:20:45 -06001504
1505class QueueSyncState {
1506 public:
John Zulauf1d5f9c12022-05-13 14:51:08 -06001507 constexpr static QueueId kQueueIdBase = QueueId(0);
1508 constexpr static QueueId kQueueIdInvalid = ~kQueueIdBase;
1509 QueueSyncState(const std::shared_ptr<QUEUE_STATE> &queue_state, VkQueueFlags queue_flags, QueueId id)
1510 : submit_index_(0), queue_state_(queue_state), last_batch_(), queue_flags_(queue_flags), id_(id) {}
John Zulaufbbda4572022-04-19 16:20:45 -06001511
1512 VulkanTypedHandle Handle() const {
1513 if (queue_state_) {
1514 return queue_state_->Handle();
1515 }
1516 return VulkanTypedHandle(static_cast<VkQueue>(VK_NULL_HANDLE), kVulkanObjectTypeQueue);
1517 }
1518 std::shared_ptr<const QueueBatchContext> LastBatch() const { return last_batch_; }
John Zulauf1d5f9c12022-05-13 14:51:08 -06001519 std::shared_ptr<QueueBatchContext> LastBatch() { return last_batch_; }
John Zulauf697c0e12022-04-19 16:31:12 -06001520 void SetLastBatch(std::shared_ptr<QueueBatchContext> &&last);
John Zulaufbbda4572022-04-19 16:20:45 -06001521 QUEUE_STATE *GetQueueState() { return queue_state_.get(); }
1522 const QUEUE_STATE *GetQueueState() const { return queue_state_.get(); }
1523 VkQueueFlags GetQueueFlags() const { return queue_flags_; }
John Zulauf1d5f9c12022-05-13 14:51:08 -06001524 QueueId GetQueueId() const { return id_; }
John Zulaufbbda4572022-04-19 16:20:45 -06001525
John Zulauf697c0e12022-04-19 16:31:12 -06001526 uint64_t ReserveSubmitId() const; // Method is const but updates the mutable submit_index_ atomically.
1527
John Zulaufbbda4572022-04-19 16:20:45 -06001528 private:
1529 mutable std::atomic<uint64_t> submit_index_;
1530 std::shared_ptr<QUEUE_STATE> queue_state_;
1531 std::shared_ptr<QueueBatchContext> last_batch_;
1532 const VkQueueFlags queue_flags_;
John Zulauf1d5f9c12022-05-13 14:51:08 -06001533 QueueId id_;
John Zulaufbbda4572022-04-19 16:20:45 -06001534};
1535
John Zulaufa8700a52022-08-18 16:22:08 -06001536// The converter needs to be more complex than simply an array of VkSubmitInfo2 structures.
1537// In order to convert from Info->Info2, arrays of VkSemaphoreSubmitInfo and VkCommandBufferSubmitInfo
1538// structures must be created for the pWaitSemaphoreInfos, pCommandBufferInfos, and pSignalSemaphoreInfos
1539// which comprise the converted VkSubmitInfo information. The created VkSubmitInfo2 structure then references the storage
1540 // of the arrays, which must have a lifespan longer than the conversion, s.t. the ensuing validation/record operations
1541 // can reference them. The resulting VkSubmitInfo2 structures are then copied into an additional vector which takes the
1542 // place of the pSubmits parameter.
1543struct SubmitInfoConverter {
1544 struct BatchStore {
1545 BatchStore(const VkSubmitInfo &info);
1546
1547 static VkSemaphoreSubmitInfo WaitSemaphore(const VkSubmitInfo &info, uint32_t index);
1548 static VkCommandBufferSubmitInfo CommandBuffer(const VkSubmitInfo &info, uint32_t index);
1549 static VkSemaphoreSubmitInfo SignalSemaphore(const VkSubmitInfo &info, uint32_t index);
1550
1551 std::vector<VkSemaphoreSubmitInfo> waits;
1552 std::vector<VkCommandBufferSubmitInfo> cbs;
1553 std::vector<VkSemaphoreSubmitInfo> signals;
1554 VkSubmitInfo2 info2;
1555 };
1556
1557 SubmitInfoConverter(uint32_t count, const VkSubmitInfo *infos);
1558
1559 std::vector<BatchStore> info_store;
1560 std::vector<VkSubmitInfo2> info2s;
1561};
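// Illustrative sketch (an assumption): a vkQueueSubmit (non-2) hook can normalize its inputs to the VkSubmitInfo2
// path like so; `submitCount` and `pSubmits` are the VkSubmitInfo parameters of the hypothetical call site:
//
//   SubmitInfoConverter converter(submitCount, pSubmits);
//   // converter.info2s.data() / converter.info2s.size() now stand in for the pSubmits/submitCount of a
//   // VkSubmitInfo2-based code path, and stay valid while `converter` remains in scope.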
1562
John Zulauf9cb530d2019-09-30 14:14:10 -06001563class SyncValidator : public ValidationStateTracker, public SyncStageAccess {
1564 public:
John Zulauf9cb530d2019-09-30 14:14:10 -06001565 using StateTracker = ValidationStateTracker;
John Zulaufea943c52022-02-22 11:05:17 -07001566 SyncValidator() { container_type = LayerObjectTypeSyncValidation; }
John Zulauf888bb9d2022-05-20 16:13:00 -06001567 virtual ~SyncValidator() { ResetCommandBufferCallbacks(); };
John Zulauf9cb530d2019-09-30 14:14:10 -06001568
John Zulauf697c0e12022-04-19 16:31:12 -06001569 // Global tag range for submitted command buffers resource usage logs
1570 mutable std::atomic<ResourceUsageTag> tag_limit_{0}; // This is reserved in Validation phase, thus mutable and atomic
1571 ResourceUsageRange ReserveGlobalTagRange(size_t tag_count) const; // Note that since tag_limit_ is mutable, this has side effects
1572 // This is a snapshot value only
John Zulauf697c0e12022-04-19 16:31:12 -06001573 AccessLogger global_access_log_;
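    // Illustrative sketch (an assumption): at submit time a batch reserves its slice of the global tag space and uses
    // the range's begin as its tag bias (names are hypothetical):
    //
    //   const ResourceUsageRange tag_range = ReserveGlobalTagRange(batch_access_log_size);
    //   batch_context->SetTagBias(tag_range.begin);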
1574
John Zulaufea943c52022-02-22 11:05:17 -07001575 layer_data::unordered_map<VkCommandBuffer, std::shared_ptr<CommandBufferAccessContext>> cb_access_state;
John Zulaufe7f6a5e2021-01-16 14:31:18 -07001576
John Zulauf1d5f9c12022-05-13 14:51:08 -06001577 using QueueSyncStatesMap = layer_data::unordered_map<VkQueue, std::shared_ptr<QueueSyncState>>;
John Zulaufbbda4572022-04-19 16:20:45 -06001578 layer_data::unordered_map<VkQueue, std::shared_ptr<QueueSyncState>> queue_sync_states_;
John Zulaufcb7e1672022-05-04 13:46:08 -06001579 SignaledSemaphores signaled_semaphores_;
John Zulaufe7f6a5e2021-01-16 14:31:18 -07001580
John Zulauf3da08bb2022-08-01 17:56:56 -06001581 using SignaledFences = layer_data::unordered_map<VkFence, FenceSyncState>;
1582 using SignaledFence = SignaledFences::value_type;
1583 SignaledFences waitable_fences_;
1584
1585 void ApplyTaggedWait(QueueId queue_id, ResourceUsageTag tag);
1586
1587 void UpdateFenceWaitInfo(VkFence fence, QueueId queue_id, ResourceUsageTag tag);
1588 void WaitForFence(VkFence fence);
1589
John Zulaufbbda4572022-04-19 16:20:45 -06001590 const QueueSyncState *GetQueueSyncState(VkQueue queue) const;
1591 QueueSyncState *GetQueueSyncState(VkQueue queue);
1592 std::shared_ptr<const QueueSyncState> GetQueueSyncStateShared(VkQueue queue) const;
1593 std::shared_ptr<QueueSyncState> GetQueueSyncStateShared(VkQueue queue);
1594
John Zulaufe0757ba2022-06-10 16:51:45 -06001595 QueueBatchContext::BatchSet GetQueueBatchSnapshot();
1596
John Zulauf1d5f9c12022-05-13 14:51:08 -06001597 template <typename Predicate>
1598 QueueBatchContext::ConstBatchSet GetQueueLastBatchSnapshot(Predicate &&pred) const;
1599 QueueBatchContext::ConstBatchSet GetQueueLastBatchSnapshot() const {
1600 return GetQueueLastBatchSnapshot(QueueBatchContext::TruePred);
1601 };
1602
1603 template <typename Predicate>
1604 QueueBatchContext::BatchSet GetQueueLastBatchSnapshot(Predicate &&pred);
1605 QueueBatchContext::BatchSet GetQueueLastBatchSnapshot() { return GetQueueLastBatchSnapshot(QueueBatchContext::TruePred); };
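    // Illustrative sketch (an assumption): the predicate form lets callers filter the snapshot, e.g. to skip the
    // queue currently being operated on (`current_queue_id` is hypothetical):
    //
    //   auto batches = GetQueueLastBatchSnapshot(
    //       [current_queue_id](const std::shared_ptr<const QueueBatchContext> &batch) {
    //           return batch->GetQueueId() != current_queue_id;
    //       });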
John Zulaufbbda4572022-04-19 16:20:45 -06001606
1607 std::shared_ptr<CommandBufferAccessContext> AccessContextFactory(VkCommandBuffer command_buffer);
John Zulaufea943c52022-02-22 11:05:17 -07001608 CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer);
1609 CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer);
1610 const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const;
1611 std::shared_ptr<CommandBufferAccessContext> GetAccessContextShared(VkCommandBuffer command_buffer);
1612 std::shared_ptr<const CommandBufferAccessContext> GetAccessContextShared(VkCommandBuffer command_buffer) const;
John Zulauf9cb530d2019-09-30 14:14:10 -06001613
John Zulaufd1f85d42020-04-15 12:23:15 -06001614 void ResetCommandBufferCallback(VkCommandBuffer command_buffer);
1615 void FreeCommandBufferCallback(VkCommandBuffer command_buffer);
John Zulauf3d84f1b2020-03-09 13:33:25 -06001616 void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
sjfricke0bea06e2022-06-05 09:22:26 +09001617 const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE cmd_type);
John Zulauf64ffe552021-02-06 10:25:07 -07001618 void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
sfricke-samsung85584a72021-09-30 21:43:38 -07001619 const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command);
sjfricke0bea06e2022-06-05 09:22:26 +09001620 void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE cmd_type);
John Zulauf33fc1d52020-07-17 11:01:10 -06001621 bool SupressedBoundDescriptorWAW(const HazardResult &hazard) const;
John Zulauf9cb530d2019-09-30 14:14:10 -06001622
Jeremy Gebben36a3b832022-03-23 10:54:18 -06001623 void CreateDevice(const VkDeviceCreateInfo *pCreateInfo) override;
John Zulauf9cb530d2019-09-30 14:14:10 -06001624
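    // The intercepts below follow the validation object convention: PreCallValidate* overrides report
    // hazards before a command executes, PreCallRecord*/PostCallRecord* overrides update the tracked
    // access state, and the unprefixed Validate*/Record* members are shared helpers that the *2/*2KHR
    // aliases funnel into.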
John Zulauf355e49b2020-04-24 15:11:15 -06001625 bool ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
sjfricke0bea06e2022-06-05 09:22:26 +09001626 const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE cmd_type) const;
John Zulauf355e49b2020-04-24 15:11:15 -06001627
1628 bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001629 VkSubpassContents contents) const override;
John Zulauf355e49b2020-04-24 15:11:15 -06001630
1631 bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08001632 const VkSubpassBeginInfo *pSubpassBeginInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -06001633
1634 bool PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08001635 const VkSubpassBeginInfo *pSubpassBeginInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -06001636
John Zulauf9cb530d2019-09-30 14:14:10 -06001637 bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001638 const VkBufferCopy *pRegions) const override;
John Zulauf9cb530d2019-09-30 14:14:10 -06001639
1640 void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001641 const VkBufferCopy *pRegions) override;
John Zulauf9cb530d2019-09-30 14:14:10 -06001642
John Zulauf4a6105a2020-11-17 15:11:05 -07001643 void PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) override;
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001644 bool PreCallValidateCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) const override;
Tony-LunarGef035472021-11-02 10:23:33 -06001645 bool PreCallValidateCmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 *pCopyBufferInfos) const override;
1646 bool ValidateCmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 *pCopyBufferInfos, CMD_TYPE cmd_type) const;
Jeff Leger178b1e52020-10-05 12:22:23 -04001647
Tony-LunarGef035472021-11-02 10:23:33 -06001648 void RecordCmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 *pCopyBufferInfos, CMD_TYPE cmd_type);
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001649 void PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) override;
Tony-LunarGef035472021-11-02 10:23:33 -06001650 void PreCallRecordCmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 *pCopyBufferInfos) override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001651
John Zulauf5c5e88d2019-12-26 11:22:02 -07001652 bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
1653 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001654 const VkImageCopy *pRegions) const override;
John Zulauf5c5e88d2019-12-26 11:22:02 -07001655
1656 void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001657 VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions) override;
John Zulauf5c5e88d2019-12-26 11:22:02 -07001658
Tony-LunarGb61514a2021-11-02 12:36:51 -06001659 bool ValidateCmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo, CMD_TYPE cmd_type) const;
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001660 bool PreCallValidateCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) const override;
Tony-LunarGb61514a2021-11-02 12:36:51 -06001661 bool PreCallValidateCmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001662
Tony-LunarGb61514a2021-11-02 12:36:51 -06001663 void RecordCmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo, CMD_TYPE cmd_type);
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001664 void PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) override;
Tony-LunarGb61514a2021-11-02 12:36:51 -06001665 void PreCallRecordCmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo) override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001666
John Zulauf9cb530d2019-09-30 14:14:10 -06001667 bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
1668 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
1669 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
1670 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
1671 uint32_t imageMemoryBarrierCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001672 const VkImageMemoryBarrier *pImageMemoryBarriers) const override;
John Zulauf9cb530d2019-09-30 14:14:10 -06001673
1674 void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
1675 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
1676 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
1677 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001678 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001679
Jeremy Gebbendf3fcc32021-02-15 08:53:17 -07001680 bool PreCallValidateCmdPipelineBarrier2KHR(VkCommandBuffer commandBuffer,
1681 const VkDependencyInfoKHR *pDependencyInfo) const override;
Tony-LunarG3f6eceb2021-11-18 14:34:49 -07001682 bool PreCallValidateCmdPipelineBarrier2(VkCommandBuffer commandBuffer, const VkDependencyInfo *pDependencyInfo) const override;
Jeremy Gebbendf3fcc32021-02-15 08:53:17 -07001683 void PreCallRecordCmdPipelineBarrier2KHR(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR *pDependencyInfo) override;
Tony-LunarG3f6eceb2021-11-18 14:34:49 -07001684 void PreCallRecordCmdPipelineBarrier2(VkCommandBuffer commandBuffer, const VkDependencyInfo *pDependencyInfo) override;
Jeremy Gebbendf3fcc32021-02-15 08:53:17 -07001685
John Zulauf3d84f1b2020-03-09 13:33:25 -06001686 void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001687 VkResult result) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001688
1689 void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001690 VkSubpassContents contents) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001691 void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001692 const VkSubpassBeginInfo *pSubpassBeginInfo) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001693 void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001694 const VkSubpassBeginInfo *pSubpassBeginInfo) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001695
Mike Schuchardt2df08912020-12-15 16:28:09 -08001696 bool ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
sjfricke0bea06e2022-06-05 09:22:26 +09001697 const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE cmd_type) const;
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001698 bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const override;
Mike Schuchardt2df08912020-12-15 16:28:09 -08001699 bool PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
1700 const VkSubpassEndInfo *pSubpassEndInfo) const override;
1701 bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
1702 const VkSubpassEndInfo *pSubpassEndInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -06001703
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001704 void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001705 void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001706 const VkSubpassEndInfo *pSubpassEndInfo) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001707 void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001708 const VkSubpassEndInfo *pSubpassEndInfo) override;
John Zulauf3d84f1b2020-03-09 13:33:25 -06001709
sjfricke0bea06e2022-06-05 09:22:26 +09001710 bool ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE cmd_type) const;
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001711 bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const override;
Mike Schuchardt2df08912020-12-15 16:28:09 -08001712 bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const override;
1713 bool PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const override;
John Zulauf355e49b2020-04-24 15:11:15 -06001714
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001715 void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) override;
1716 void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) override;
1717 void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001718
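    // The transfer commands below share RegionType-templated helpers so the original entry points and
    // their *2/*2KHR structure variants run through the same validation and record paths.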
sfricke-samsung71f04e32022-03-16 01:21:21 -05001719 template <typename RegionType>
Jeff Leger178b1e52020-10-05 12:22:23 -04001720 bool ValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
sfricke-samsung71f04e32022-03-16 01:21:21 -05001721 VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions,
Tony Barbour845d29b2021-11-09 11:43:14 -07001722 CMD_TYPE cmd_type) const;
locke-lunarga19c71d2020-03-02 18:17:04 -07001723 bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
1724 VkImageLayout dstImageLayout, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001725 const VkBufferImageCopy *pRegions) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001726 bool PreCallValidateCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001727 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) const override;
Tony Barbour845d29b2021-11-09 11:43:14 -07001728 bool PreCallValidateCmdCopyBufferToImage2(VkCommandBuffer commandBuffer,
1729 const VkCopyBufferToImageInfo2 *pCopyBufferToImageInfo) const override;
locke-lunarga19c71d2020-03-02 18:17:04 -07001730
sfricke-samsung71f04e32022-03-16 01:21:21 -05001731 template <typename RegionType>
Jeff Leger178b1e52020-10-05 12:22:23 -04001732 void RecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
sfricke-samsung71f04e32022-03-16 01:21:21 -05001733 VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions,
Tony Barbour845d29b2021-11-09 11:43:14 -07001734 CMD_TYPE cmd_type);
locke-lunarga19c71d2020-03-02 18:17:04 -07001735 void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001736 VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions) override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001737 void PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001738 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) override;
Tony Barbour845d29b2021-11-09 11:43:14 -07001739 void PreCallRecordCmdCopyBufferToImage2(VkCommandBuffer commandBuffer,
1740 const VkCopyBufferToImageInfo2 *pCopyBufferToImageInfo) override;
locke-lunarga19c71d2020-03-02 18:17:04 -07001741
sfricke-samsung71f04e32022-03-16 01:21:21 -05001742 template <typename RegionType>
Jeff Leger178b1e52020-10-05 12:22:23 -04001743 bool ValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
sfricke-samsung71f04e32022-03-16 01:21:21 -05001744 VkBuffer dstBuffer, uint32_t regionCount, const RegionType *pRegions,
Tony-LunarGaf3632a2021-11-10 15:51:57 -07001745 CMD_TYPE cmd_type) const;
locke-lunarga19c71d2020-03-02 18:17:04 -07001746 bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001747 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001748 bool PreCallValidateCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001749 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) const override;
Tony-LunarGaf3632a2021-11-10 15:51:57 -07001750 bool PreCallValidateCmdCopyImageToBuffer2(VkCommandBuffer commandBuffer,
1751 const VkCopyImageToBufferInfo2 *pCopyImageToBufferInfo) const override;
locke-lunarga19c71d2020-03-02 18:17:04 -07001752
sfricke-samsung71f04e32022-03-16 01:21:21 -05001753 template <typename RegionType>
Jeff Leger178b1e52020-10-05 12:22:23 -04001754 void RecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
sfricke-samsung71f04e32022-03-16 01:21:21 -05001755 VkBuffer dstBuffer, uint32_t regionCount, const RegionType *pRegions, CMD_TYPE cmd_type);
locke-lunarga19c71d2020-03-02 18:17:04 -07001756 void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001757 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001758 void PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001759 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) override;
Tony-LunarGaf3632a2021-11-10 15:51:57 -07001760 void PreCallRecordCmdCopyImageToBuffer2(VkCommandBuffer commandBuffer,
1761 const VkCopyImageToBufferInfo2 *pCopyImageToBufferInfo) override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001762
1763 template <typename RegionType>
1764 bool ValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
1765 VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter,
sjfricke0bea06e2022-06-05 09:22:26 +09001766 CMD_TYPE cmd_type) const;
locke-lunarga19c71d2020-03-02 18:17:04 -07001767
1768 bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
1769 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001770 const VkImageBlit *pRegions, VkFilter filter) const override;
1771 bool PreCallValidateCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) const override;
Tony-LunarG542ae912021-11-04 16:06:44 -06001772 bool PreCallValidateCmdBlitImage2(VkCommandBuffer commandBuffer, const VkBlitImageInfo2 *pBlitImageInfo) const override;
locke-lunarga19c71d2020-03-02 18:17:04 -07001773
Jeff Leger178b1e52020-10-05 12:22:23 -04001774 template <typename RegionType>
1775 void RecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
1776 VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter,
1777 ResourceUsageTag tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07001778 void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
1779 VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001780 VkFilter filter) override;
1781 void PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) override;
Tony-LunarG542ae912021-11-04 16:06:44 -06001782 void PreCallRecordCmdBlitImage2(VkCommandBuffer commandBuffer, const VkBlitImageInfo2 *pBlitImageInfo) override;
locke-lunarg36ba2592020-04-03 09:42:04 -06001783
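    // Helpers for the parameter buffers of indirect draws and dispatches: the struct-sized reads of the
    // indirect buffer (and the count buffer, where present) are validated and recorded here.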
John Zulauffaea0ee2021-01-14 14:01:32 -07001784 bool ValidateIndirectBuffer(const CommandBufferAccessContext &cb_context, const AccessContext &context,
1785 VkCommandBuffer commandBuffer, const VkDeviceSize struct_size, const VkBuffer buffer,
1786 const VkDeviceSize offset, const uint32_t drawCount, const uint32_t stride,
sjfricke0bea06e2022-06-05 09:22:26 +09001787 CMD_TYPE cmd_type) const;
John Zulauf14940722021-04-12 15:19:02 -06001788 void RecordIndirectBuffer(AccessContext &context, ResourceUsageTag tag, const VkDeviceSize struct_size, const VkBuffer buffer,
1789 const VkDeviceSize offset, const uint32_t drawCount, uint32_t stride);
locke-lunarg36ba2592020-04-03 09:42:04 -06001790
John Zulauffaea0ee2021-01-14 14:01:32 -07001791 bool ValidateCountBuffer(const CommandBufferAccessContext &cb_context, const AccessContext &context,
sjfricke0bea06e2022-06-05 09:22:26 +09001792 VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, CMD_TYPE cmd_type) const;
John Zulauf14940722021-04-12 15:19:02 -06001793 void RecordCountBuffer(AccessContext &context, ResourceUsageTag tag, VkBuffer buffer, VkDeviceSize offset);
locke-lunarg93d68af2020-05-12 17:18:03 -06001794
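    // Draw and dispatch intercepts; the *Indirect* variants also route through the indirect/count buffer
    // helpers above.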
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001795 bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const override;
1796 void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001797
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001798 bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const override;
1799 void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001800
1801 bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001802 uint32_t firstInstance) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001803 void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001804 uint32_t firstInstance) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001805
1806 bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001807 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001808 void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001809 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001810
1811 bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001812 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001813 void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001814 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001815
1816 bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001817 uint32_t drawCount, uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001818 void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001819 uint32_t drawCount, uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001820
locke-lunargff255f92020-05-13 18:53:52 -06001821 bool ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer,
1822 VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride,
sjfricke0bea06e2022-06-05 09:22:26 +09001823 CMD_TYPE cmd_type) const;
locke-lunarge1a67022020-04-29 00:15:36 -06001824 bool PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1825 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001826 uint32_t stride) const override;
sfricke-samsung85584a72021-09-30 21:43:38 -07001827 void RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer,
1828 VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, CMD_TYPE cmd_type);
locke-lunarge1a67022020-04-29 00:15:36 -06001829 void PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1830 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001831 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001832 bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1833 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001834 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001835 void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1836 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001837 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001838 bool PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1839 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001840 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001841 void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1842 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001843 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001844
locke-lunargff255f92020-05-13 18:53:52 -06001845 bool ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1846 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
sjfricke0bea06e2022-06-05 09:22:26 +09001847 uint32_t stride, CMD_TYPE cmd_type) const;
locke-lunarge1a67022020-04-29 00:15:36 -06001848 bool PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1849 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001850 uint32_t stride) const override;
sfricke-samsung85584a72021-09-30 21:43:38 -07001851 void RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1852 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
1853 uint32_t stride, CMD_TYPE cmd_type);
locke-lunarge1a67022020-04-29 00:15:36 -06001854 void PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1855 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001856 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001857 bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1858 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001859 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001860 void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1861 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001862 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001863 bool PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1864 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001865 uint32_t stride) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001866 void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1867 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001868 uint32_t stride) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001869
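    // Clear, query-result copy, fill, resolve, and update commands: the affected buffer/image ranges are
    // checked for hazards before the accesses are recorded.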
1870 bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1871 const VkClearColorValue *pColor, uint32_t rangeCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001872 const VkImageSubresourceRange *pRanges) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001873 void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1874 const VkClearColorValue *pColor, uint32_t rangeCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001875 const VkImageSubresourceRange *pRanges) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001876
1877 bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1878 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001879 const VkImageSubresourceRange *pRanges) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001880 void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1881 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001882 const VkImageSubresourceRange *pRanges) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001883
1884 bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
1885 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001886 VkDeviceSize stride, VkQueryResultFlags flags) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001887 void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
1888 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001889 VkQueryResultFlags flags) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001890
1891 bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001892 uint32_t data) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001893 void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001894 uint32_t data) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001895
1896 bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
1897 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001898 const VkImageResolve *pRegions) const override;
Jeff Leger178b1e52020-10-05 12:22:23 -04001899
locke-lunarge1a67022020-04-29 00:15:36 -06001900 void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
1901 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001902 const VkImageResolve *pRegions) override;
locke-lunarge1a67022020-04-29 00:15:36 -06001903
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001904 bool PreCallValidateCmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR *pResolveImageInfo) const override;
Tony-LunarG562fc102021-11-12 13:58:35 -07001905 bool PreCallValidateCmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2 *pResolveImageInfo) const override;
1906 bool ValidateCmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2 *pResolveImageInfo, CMD_TYPE cmd_type) const;
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001907 void PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR *pResolveImageInfo) override;
Tony-LunarG562fc102021-11-12 13:58:35 -07001908 void PreCallRecordCmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2 *pResolveImageInfo) override;
1909 void RecordCmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2 *pResolveImageInfo, CMD_TYPE cmd_type);
Jeff Leger178b1e52020-10-05 12:22:23 -04001910
locke-lunarge1a67022020-04-29 00:15:36 -06001911 bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001912 VkDeviceSize dataSize, const void *pData) const override;
locke-lunarge1a67022020-04-29 00:15:36 -06001913 void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001914 VkDeviceSize dataSize, const void *pData) override;
locke-lunargff255f92020-05-13 18:53:52 -06001915
1916 bool PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001917 VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const override;
locke-lunargff255f92020-05-13 18:53:52 -06001918 void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
Jeremy Gebbenf8924692020-10-28 16:27:14 -06001919 VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) override;
John Zulauf49beb112020-11-04 16:06:31 -07001920
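    // Event commands (vkCmdSetEvent/ResetEvent/WaitEvents and their Synchronization2 variants). Set
    // records the source scope at the signal point; wait validation applies that scope to the barriers
    // it executes.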
1921 bool PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const override;
1922 void PostCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) override;
1923
John Zulauf4edde622021-02-15 08:54:50 -07001924 bool PreCallValidateCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
1925 const VkDependencyInfoKHR *pDependencyInfo) const override;
Tony-LunarGc43525f2021-11-15 16:12:38 -07001926 bool PreCallValidateCmdSetEvent2(VkCommandBuffer commandBuffer, VkEvent event,
1927 const VkDependencyInfo *pDependencyInfo) const override;
John Zulauf4edde622021-02-15 08:54:50 -07001928 void PostCallRecordCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
1929 const VkDependencyInfoKHR *pDependencyInfo) override;
Tony-LunarGc43525f2021-11-15 16:12:38 -07001930 void PostCallRecordCmdSetEvent2(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo *pDependencyInfo) override;
John Zulauf4edde622021-02-15 08:54:50 -07001931
John Zulauf49beb112020-11-04 16:06:31 -07001932 bool PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const override;
1933 void PostCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) override;
1934
John Zulauf4edde622021-02-15 08:54:50 -07001935 bool PreCallValidateCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
1936 VkPipelineStageFlags2KHR stageMask) const override;
Tony-LunarGa2662db2021-11-16 07:26:24 -07001937 bool PreCallValidateCmdResetEvent2(VkCommandBuffer commandBuffer, VkEvent event,
1938 VkPipelineStageFlags2 stageMask) const override;
John Zulauf4edde622021-02-15 08:54:50 -07001939 void PostCallRecordCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) override;
Tony-LunarGa2662db2021-11-16 07:26:24 -07001940 void PostCallRecordCmdResetEvent2(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask) override;
John Zulauf4edde622021-02-15 08:54:50 -07001941
John Zulauf49beb112020-11-04 16:06:31 -07001942 bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
1943 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
1944 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
1945 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
1946 uint32_t imageMemoryBarrierCount,
1947 const VkImageMemoryBarrier *pImageMemoryBarriers) const override;
1948 void PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
1949 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
1950 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
1951 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
1952 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) override;
John Zulauf4edde622021-02-15 08:54:50 -07001953 bool PreCallValidateCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
1954 const VkDependencyInfoKHR *pDependencyInfos) const override;
1955 void PostCallRecordCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
1956 const VkDependencyInfoKHR *pDependencyInfos) override;
Tony-LunarG1364cf52021-11-17 16:10:11 -07001957 bool PreCallValidateCmdWaitEvents2(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
1958 const VkDependencyInfo *pDependencyInfos) const override;
1959 void PostCallRecordCmdWaitEvents2(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
1960 const VkDependencyInfo *pDependencyInfos) override;
Jeremy Gebbendf3fcc32021-02-15 08:53:17 -07001961 bool PreCallValidateCmdWriteBufferMarker2AMD(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer,
1962 VkDeviceSize dstOffset, uint32_t marker) const override;
1963 void PreCallRecordCmdWriteBufferMarker2AMD(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer,
1964 VkDeviceSize dstOffset, uint32_t marker) override;
John Zulaufae842002021-04-15 18:20:55 -06001965 bool PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
1966 const VkCommandBuffer *pCommandBuffers) const override;
1967 void PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
1968 const VkCommandBuffer *pCommandBuffers) override;
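    // Queue-scope operations: waits, submits, and fence status updates are validated and recorded against
    // the per-queue batch contexts rather than a single command buffer.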
John Zulauf1d5f9c12022-05-13 14:51:08 -06001969 void PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) override;
1970 void PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) override;
John Zulaufa8700a52022-08-18 16:22:08 -06001971 bool ValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 *pSubmits, VkFence fence,
1972 const char *func_name) const;
John Zulaufbbda4572022-04-19 16:20:45 -06001973 bool PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
1974 VkFence fence) const override;
John Zulaufa8700a52022-08-18 16:22:08 -06001975 void RecordQueueSubmit(VkQueue queue, VkFence fence, VkResult result);
John Zulaufbbda4572022-04-19 16:20:45 -06001976 void PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence,
1977 VkResult result) override;
1978 bool PreCallValidateQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
1979 VkFence fence) const override;
1980 void PostCallRecordQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits, VkFence fence,
1981 VkResult result) override;
John Zulaufa8700a52022-08-18 16:22:08 -06001982 bool PreCallValidateQueueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
1983 VkFence fence) const override;
1984 void PostCallRecordQueueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits, VkFence fence,
1985 VkResult result) override;
John Zulauf3da08bb2022-08-01 17:56:56 -06001986 void PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) override;
1987 void PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
1988 uint64_t timeout, VkResult result) override;
John Zulauf9cb530d2019-09-30 14:14:10 -06001989};