/*
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 */

#pragma once

#include <map>
#include <memory>
#include <unordered_map>
#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"

enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};
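
// Note (descriptive, not from the implementation): the first three values are the classic
// intra-queue hazards, reported when a prior access to the same range is not covered by an
// appropriate barrier; the *_RACING_* values are reported against accesses in unsynchronized
// asynchronous contexts (e.g. a parallel subpass), where no ordering exists at all.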

// Useful utilities for manipulating StageAccess parameters, suitable as a base class to save typing
struct SyncStageAccess {
    static SyncStageAccessFlagBits FlagBit(SyncStageAccessIndex stage_access) {
        return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
    }

    static bool IsRead(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessReadMask); }
    static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }

    static bool IsWrite(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessWriteMask); }
    static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
    static VkPipelineStageFlagBits PipelineStageBit(SyncStageAccessIndex stage_access_index) {
        return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
    }
    static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags stages);
    static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(VkPipelineStageFlags stages, VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(SyncStageAccessFlags stage_scope, VkAccessFlags accesses) {
        return stage_scope & AccessScopeByAccess(accesses);
    }
};
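
// Usage sketch (illustrative only; the mask values below are assumptions for the example, not
// taken from the implementation): deriving a barrier's source access scope from its stage and
// access masks.
//
//   VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
//   VkAccessFlags src_access = VK_ACCESS_TRANSFER_WRITE_BIT;
//   SyncStageAccessFlags stage_scope = SyncStageAccess::AccessScopeByStage(src_stages);
//   SyncStageAccessFlags access_scope = SyncStageAccess::AccessScope(stage_scope, src_access);
//   // access_scope now holds only the stage/access bits present in both scopes.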

struct ResourceUsageTag {
    uint64_t index;
    ResourceUsageTag &operator++() {
        index++;
        return *this;
    }
    bool IsBefore(const ResourceUsageTag &rhs) const { return index < rhs.index; }
    bool operator==(const ResourceUsageTag &rhs) const { return (index == rhs.index); }
    bool operator!=(const ResourceUsageTag &rhs) const { return !(*this == rhs); }
};
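
// Sketch: tags order recorded usages by a monotonically increasing index, so "happens-before"
// between two recorded accesses reduces to an integer compare.
//
//   ResourceUsageTag a{};   // index == 0
//   ResourceUsageTag b = a;
//   ++b;                    // b is now logically after a
//   assert(a.IsBefore(b) && (a != b));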

struct HazardResult {
    SyncHazard hazard = NONE;
    ResourceUsageTag tag = ResourceUsageTag();
    void Set(SyncHazard hazard_, const ResourceUsageTag &tag_) {
        hazard = hazard_;
        tag = tag_;
    }
};

struct SyncBarrier {
    VkPipelineStageFlags src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    VkPipelineStageFlags dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
    SyncBarrier() = default;
    SyncBarrier &operator=(const SyncBarrier &) = default;
    SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &sub_pass_barrier);
};
using SyncBarrierStack = std::vector<const SyncBarrier *>;

class ResourceAccessState : public SyncStageAccess {
  protected:
    // Multiple read operations can be simultaneously (and independently) synchronized.
    // Given that only the second execution scope creates a dependency chain, we have to track each,
    // but only up to one per pipeline stage (as another read from the *same* stage becomes the more
    // recent, and applicable, one for hazard detection).
    struct ReadState {
        VkPipelineStageFlagBits stage;  // The stage of this read
        VkPipelineStageFlags barriers;  // all applicable barriered stages
        ResourceUsageTag tag;
        bool operator==(const ReadState &rhs) const {
            bool same = (stage == rhs.stage) && (barriers == rhs.barriers) && (tag == rhs.tag);
            return same;
        }
        bool operator!=(const ReadState &rhs) const { return !(*this == rhs); }
    };
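
    // For example (descriptive note): a VERTEX_SHADER read and a FRAGMENT_SHADER read of the same
    // range occupy two ReadState entries, while a second FRAGMENT_SHADER read replaces the earlier
    // fragment entry rather than adding a third.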

    static ResourceAccessState ApplyBarrierStack(const ResourceAccessState &that, const SyncBarrierStack &barrier_stack);

  public:
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, SyncBarrierStack *barrier_stack) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;

    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
                                     SyncStageAccessFlags src_access_scope, SyncBarrierStack *barrier_stack) const;
    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_stage_mask,
                                     SyncStageAccessFlags source_scope) const;

    HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index) const;

    void Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag);
    void Resolve(const ResourceAccessState &other);
    void ApplyBarrier(const SyncBarrier &barrier);
    void ApplyExecutionBarrier(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask);
    void ApplyMemoryAccessBarrier(VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_scope,
                                  VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_scope);

    ResourceAccessState()
        : write_barriers(~SyncStageAccessFlags(0)), write_dependency_chain(0), last_read_count(0), last_read_stages(0) {}

    bool HasWriteOp() const { return last_write != 0; }
    bool operator==(const ResourceAccessState &rhs) const {
        bool same = (write_barriers == rhs.write_barriers) && (write_dependency_chain == rhs.write_dependency_chain) &&
                    (last_read_count == rhs.last_read_count) && (last_read_stages == rhs.last_read_stages) &&
                    (write_tag == rhs.write_tag);
        for (uint32_t i = 0; same && i < last_read_count; i++) {
            // Every entry must match; this must be a logical AND (an OR here would leave "same" stuck at true).
            same = same && (last_reads[i] == rhs.last_reads[i]);
        }
        return same;
    }
    bool operator!=(const ResourceAccessState &rhs) const { return !(*this == rhs); }

  private:
    bool IsWriteHazard(SyncStageAccessFlagBits usage) const { return 0 != (usage & ~write_barriers); }
    bool IsReadHazard(VkPipelineStageFlagBits stage, const ReadState &read_access) const {
        return 0 != (stage & ~read_access.barriers);
    }
    bool IsReadHazard(VkPipelineStageFlags stage_mask, const ReadState &read_access) const {
        return stage_mask != (stage_mask & read_access.barriers);
    }
    // With reads, each must be "safe" relative to its prior write, so we need only
    // save the most recent write operation (as anything *transitively* unsafe would already
    // be included).
    SyncStageAccessFlags write_barriers;          // union of applicable barrier masks since last write
    VkPipelineStageFlags write_dependency_chain;  // initially zero, but accumulating the dstStages of barriers if they chain.
    uint32_t last_read_count;
    VkPipelineStageFlags last_read_stages;

    ResourceUsageTag write_tag;

    std::array<ReadState, 8 * sizeof(VkPipelineStageFlags)> last_reads;
    SyncStageAccessFlagBits last_write;  // only the most recent write
};
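
// Typical single-resource flow (sketch; the SYNC_TRANSFER_* usage indices are assumed from the
// generated synchronization types, not declared in this header):
//
//   ResourceAccessState state;
//   ResourceUsageTag tag{};
//   state.Update(SYNC_TRANSFER_TRANSFER_WRITE, tag);  // record a write
//   auto hazard = state.DetectHazard(SYNC_TRANSFER_TRANSFER_READ);
//   if (hazard.hazard != NONE) { /* READ_AFTER_WRITE, since no barrier was applied */ }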

using ResourceAccessRangeMap = sparse_container::range_map<VkDeviceSize, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;

class AccessContext {
  public:
    enum AddressType : int {
        kLinearAddress = 0,
        kIdealizedAddress = 1,
        kMaxAddressType = 1
    };
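
    // Presumed distinction (inferred from the names, not stated in this header): kLinearAddress keys
    // accesses by raw VkDeviceSize offsets into bound device memory, while kIdealizedAddress uses a
    // synthetic linearization for opaquely tiled images whose memory layout is implementation-defined.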

    struct TrackBack {
        SyncBarrier barrier;
        AccessContext *context;
        TrackBack(AccessContext *context_, VkQueueFlags queue_flags_, const VkSubpassDependency2 &subpass_barrier_)
            : barrier(queue_flags_, subpass_barrier_), context(context_) {}
        TrackBack &operator=(const TrackBack &) = default;
        TrackBack() = default;
    };

    HazardResult DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index,
                              const ResourceAccessRange &range) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          SyncStageAccessFlags src_stage_accesses,
                                          const VkImageMemoryBarrier &barrier) const;

    const TrackBack &GetDstExternalTrackBack() const { return dst_external_; }
    void Reset() {
        prev_.clear();
        async_.clear();
        src_external_ = TrackBack();
        for (auto &map : access_state_maps_) {
            map.clear();
        }
    }
    // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead
    // Would need to add a "hint" overload to parallel_iterator::invalidate_[AB] call, if so.
    void ResolvePreviousAccess(AddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state) const;
    void ResolvePreviousAccess(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range,
                               AddressType address_type, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state) const;
    void ResolveTrackBack(AddressType type, const ResourceAccessRange &range, const TrackBack &track_back,
                          ResourceAccessRangeMap *descent_map, const ResourceAccessState *infill_state,
                          bool recur_to_infill = true) const;
    void UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);

    void ResolveChildContexts(const std::vector<AccessContext> &contexts);

    template <typename Action>
    void UpdateMemoryAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action);
    template <typename Action>
    void UpdateMemoryAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range, const Action action);

    template <typename Action>
    void ApplyGlobalBarriers(const Action &barrier_action);

    static AddressType ImageAddressType(const IMAGE_STATE &image);
    static VkDeviceSize ResourceBaseAddress(const BINDABLE &bindable);

    AccessContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
                  const std::vector<AccessContext> &contexts, AccessContext *external_context);

    AccessContext() { Reset(); }

    ResourceAccessRangeMap &GetAccessStateMap(AddressType type) { return access_state_maps_[type]; }
    const ResourceAccessRangeMap &GetAccessStateMap(AddressType type) const { return access_state_maps_[type]; }
    ResourceAccessRangeMap &GetLinearMap() { return GetAccessStateMap(AddressType::kLinearAddress); }
    const ResourceAccessRangeMap &GetLinearMap() const { return GetAccessStateMap(AddressType::kLinearAddress); }
    ResourceAccessRangeMap &GetIdealizedMap() { return GetAccessStateMap(AddressType::kIdealizedAddress); }
    const ResourceAccessRangeMap &GetIdealizedMap() const { return GetAccessStateMap(AddressType::kIdealizedAddress); }

  private:
    using ParallelMapIterator = sparse_container::parallel_iterator<ResourceAccessRangeMap, const ResourceAccessRangeMap>;

    HazardResult DetectHazard(AddressType type, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
    HazardResult DetectBarrierHazard(AddressType type, SyncStageAccessIndex current_usage, VkPipelineStageFlags src_exec_scope,
                                     SyncStageAccessFlags src_access_scope, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectAsyncHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectPreviousHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    void UpdateAccessState(AddressType type, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);

    constexpr static int kAddressTypeCount = AddressType::kMaxAddressType + 1;
    static const std::array<AddressType, kAddressTypeCount> kAddressTypes;
    std::array<ResourceAccessRangeMap, kAddressTypeCount> access_state_maps_;
    std::vector<TrackBack> prev_;
    std::vector<AccessContext *> async_;
    TrackBack src_external_;
    TrackBack dst_external_;
};
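
// Usage sketch (illustrative; the buffer-state lookup, region, and usage index are assumptions for
// the example): validating a buffer copy source against previously recorded accesses.
//
//   const AccessContext *context = cb_access_context.GetCurrentAccessContext();
//   ResourceAccessRange range(region.srcOffset, region.srcOffset + region.size);
//   auto hazard = context->DetectHazard(*src_buffer_state, SYNC_TRANSFER_TRANSFER_READ, range);
//   if (hazard.hazard != NONE) { /* report through the validation error framework */ }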

struct RenderPassAccessContext {
    uint32_t current_subpass_;
    std::vector<AccessContext> subpass_contexts_;
    const std::vector<SubpassDependencyGraphNode> *dependencies_;
    RenderPassAccessContext(VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> *dependencies,
                            AccessContext *external_context)
        : current_subpass_(0), dependencies_(dependencies) {
        if (dependencies_) {
            subpass_contexts_.emplace_back(0, queue_flags, *dependencies_, subpass_contexts_, external_context);
        }
    }
    void NextSubpass(VkQueueFlags queue_flags, AccessContext *external_context) {
        current_subpass_++;
        subpass_contexts_.emplace_back(current_subpass_, queue_flags, *dependencies_, subpass_contexts_, external_context);
        assert(subpass_contexts_.size() == (current_subpass_ + 1));
    }
    AccessContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
    const AccessContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
};

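// Lifecycle note (descriptive, inferred from the interface below): outside a render pass, accesses
// are recorded directly into cb_tracker_context_; BeginRenderPass points current_context_ at the
// active subpass's AccessContext within a RenderPassAccessContext, NextRenderPass advances the
// subpass, and EndRenderPass restores the command-buffer-level context.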
class CommandBufferAccessContext {
  public:
    CommandBufferAccessContext()
        : render_pass_contexts_(),
          cb_tracker_context_(),
          current_context_(&cb_tracker_context_),
          current_renderpass_context_(),
          cb_state_(),
          queue_flags_() {}
    CommandBufferAccessContext(std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
        : CommandBufferAccessContext() {
        cb_state_ = cb_state;
        queue_flags_ = queue_flags;
    }

    void Reset() {
        cb_tracker_context_.Reset();
        render_pass_contexts_.clear();
        current_context_ = &cb_tracker_context_;
        current_renderpass_context_ = nullptr;
    }

    AccessContext *GetCurrentAccessContext() { return current_context_; }
    const AccessContext *GetCurrentAccessContext() const { return current_context_; }
    void BeginRenderPass(const RENDER_PASS_STATE &render_pass);
    void NextRenderPass(const RENDER_PASS_STATE &render_pass);
    void EndRenderPass(const RENDER_PASS_STATE &render_pass);
    CMD_BUFFER_STATE *GetCommandBufferState() { return cb_state_.get(); }
    const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
    VkQueueFlags GetQueueFlags() const { return queue_flags_; }

  private:
    std::vector<RenderPassAccessContext> render_pass_contexts_;
    AccessContext cb_tracker_context_;
    AccessContext *current_context_;
    RenderPassAccessContext *current_renderpass_context_;
    std::shared_ptr<CMD_BUFFER_STATE> cb_state_;
    VkQueueFlags queue_flags_;
};

class SyncValidator : public ValidationStateTracker, public SyncStageAccess {
  public:
    SyncValidator() { container_type = LayerObjectTypeSyncValidation; }
    using StateTracker = ValidationStateTracker;

    using StateTracker::AccessorTraitsTypes;
    ResourceUsageTag tag;  // Find a better tagging scheme...
    std::unordered_map<VkCommandBuffer, std::unique_ptr<CommandBufferAccessContext>> cb_access_state;
    CommandBufferAccessContext *GetAccessContextImpl(VkCommandBuffer command_buffer, bool do_insert) {
        auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            if (!do_insert) return nullptr;
            // If we don't have one, make it.
            auto cb_state = GetShared<CMD_BUFFER_STATE>(command_buffer);
            assert(cb_state.get());
            auto queue_flags = GetQueueFlags(*cb_state);
            std::unique_ptr<CommandBufferAccessContext> context(new CommandBufferAccessContext(cb_state, queue_flags));
            auto insert_pair = cb_access_state.insert(std::make_pair(command_buffer, std::move(context)));
            found_it = insert_pair.first;
        }
        return found_it->second.get();
    }
    CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, true);  // true -> do_insert on not found
    }
    CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, false);  // false -> don't do_insert on not found
    }

    const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const {
        const auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            return nullptr;
        }
        return found_it->second.get();
    }

    void ApplyGlobalBarriers(AccessContext *context, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                             SyncStageAccessFlags src_stage_scope, SyncStageAccessFlags dst_stage_scope,
                             uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers);
    void ApplyBufferBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_stage_scope,
                             VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count,
                             const VkBufferMemoryBarrier *barriers);
    void ApplyImageBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_stage_scope,
                            VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count,
                            const VkImageMemoryBarrier *barriers);

    void ResetCommandBufferCallback(VkCommandBuffer command_buffer);
    void FreeCommandBufferCallback(VkCommandBuffer command_buffer);
    void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                  const VkSubpassBeginInfo *pSubpassBeginInfo);
    void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                              const VkSubpassEndInfo *pSubpassEndInfo);
    void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);

    void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                    const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result);

    bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                      const VkBufferCopy *pRegions) const;

    void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                    const VkBufferCopy *pRegions);

    bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageCopy *pRegions) const;

    void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions);

    bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                           VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                           uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                           uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                           uint32_t imageMemoryBarrierCount,
                                           const VkImageMemoryBarrier *pImageMemoryBarriers) const;

    void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                         VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                         uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                         uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                         uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers);

    void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
                                          VkResult result);

    void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                          VkSubpassContents contents);
    void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                           const VkSubpassBeginInfo *pSubpassBeginInfo);
    void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                              const VkSubpassBeginInfo *pSubpassBeginInfo);

    void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
    void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                       const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                          const VkSubpassEndInfo *pSubpassEndInfo);

    void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer);
    void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);

    bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                             VkImageLayout dstImageLayout, uint32_t regionCount,
                                             const VkBufferImageCopy *pRegions) const;

    void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                           VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions);

    bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                             VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) const;

    void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                           VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions);

    bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageBlit *pRegions, VkFilter filter) const;

    void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions,
                                   VkFilter filter);
};