/*
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 */

#pragma once

#include <array>
#include <map>
#include <memory>
#include <unordered_map>
#include <vector>
#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"

enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};

// Useful Utilities for manipulating StageAccess parameters, suitable as base class to save typing
struct SyncStageAccess {
    static SyncStageAccessFlagBits FlagBit(SyncStageAccessIndex stage_access) {
        return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
    }

    static bool IsRead(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessReadMask); }
    static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }

    static bool IsWrite(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessWriteMask); }
    static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
    static VkPipelineStageFlagBits PipelineStageBit(SyncStageAccessIndex stage_access_index) {
        return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
    }
    static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags stages);
    static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(VkPipelineStageFlags stages, VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(SyncStageAccessFlags stage_scope, VkAccessFlags accesses) {
        return stage_scope & AccessScopeByAccess(accesses);
    }
};
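
// Illustrative usage sketch (not part of the build): reducing a barrier's stage/access masks to a
// SyncStageAccessFlags scope with the helpers above.  The specific VK_* masks are example values only.
#if 0
inline SyncStageAccessFlags ExampleTransferWriteScope() {
    const VkPipelineStageFlags stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
    const VkAccessFlags accesses = VK_ACCESS_TRANSFER_WRITE_BIT;
    // Keep only the accesses that are actually reachable from the given stages.
    return SyncStageAccess::AccessScope(stages, accesses);
}
#endif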

struct ResourceUsageTag {
    uint64_t index;
    ResourceUsageTag &operator++() {
        index++;
        return *this;
    }
    bool IsBefore(const ResourceUsageTag &rhs) const { return index < rhs.index; }
    bool operator==(const ResourceUsageTag &rhs) const { return (index == rhs.index); }
    bool operator!=(const ResourceUsageTag &rhs) const { return !(*this == rhs); }
};

struct HazardResult {
    SyncHazard hazard = NONE;
    ResourceUsageTag tag = ResourceUsageTag();
    void Set(SyncHazard hazard_, const ResourceUsageTag &tag_) {
        hazard = hazard_;
        tag = tag_;
    }
};

struct SyncBarrier {
    VkPipelineStageFlags src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    VkPipelineStageFlags dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
    SyncBarrier() = default;
    SyncBarrier &operator=(const SyncBarrier &) = default;
    SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &sub_pass_barrier);
};
using SyncBarrierStack = std::vector<const SyncBarrier *>;

class ResourceAccessState : public SyncStageAccess {
  protected:
    // Multiple read operations can be simultaneously (and independently) synchronized.  Given that only the second
    // execution scope creates a dependency chain, we have to track each read, but only up to one per pipeline stage
    // (another read from the *same* stage becomes the more recent, and thus the applicable, one for hazard detection).
    struct ReadState {
        VkPipelineStageFlagBits stage;  // The stage of this read
        VkPipelineStageFlags barriers;  // all applicable barriered stages
        ResourceUsageTag tag;
        bool operator==(const ReadState &rhs) const {
            bool same = (stage == rhs.stage) && (barriers == rhs.barriers) && (tag == rhs.tag);
            return same;
        }
        bool operator!=(const ReadState &rhs) const { return !(*this == rhs); }
    };

    static ResourceAccessState ApplyBarrierStack(const ResourceAccessState &that, const SyncBarrierStack &barrier_stack);

  public:
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, SyncBarrierStack *barrier_stack) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;

    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
                                     SyncStageAccessFlags src_access_scope, SyncBarrierStack *barrier_stack) const;
    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_stage_mask,
                                     SyncStageAccessFlags source_scope) const;

    HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index) const;

    void Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag);
    void Resolve(const ResourceAccessState &other);
    void ApplyBarrier(const SyncBarrier &barrier);
    void ApplyExecutionBarrier(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask);
    void ApplyMemoryAccessBarrier(VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_scope,
                                  VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_scope);

    // Initialize write_tag and last_write as well, so a default-constructed (infill) state reads as "no prior access".
    ResourceAccessState()
        : write_barriers(~SyncStageAccessFlags(0)),
          write_dependency_chain(0),
          last_read_count(0),
          last_read_stages(0),
          write_tag(),
          last_write(SyncStageAccessFlagBits(0)) {}

    bool HasWriteOp() const { return last_write != 0; }
    bool operator==(const ResourceAccessState &rhs) const {
        bool same = (write_barriers == rhs.write_barriers) && (write_dependency_chain == rhs.write_dependency_chain) &&
                    (last_read_count == rhs.last_read_count) && (last_read_stages == rhs.last_read_stages) &&
                    (write_tag == rhs.write_tag);
        for (uint32_t i = 0; same && i < last_read_count; i++) {
            same = same && (last_reads[i] == rhs.last_reads[i]);
        }
        return same;
    }
    bool operator!=(const ResourceAccessState &rhs) const { return !(*this == rhs); }

  private:
    bool IsWriteHazard(SyncStageAccessFlagBits usage) const { return 0 != (usage & ~write_barriers); }
    bool IsReadHazard(VkPipelineStageFlagBits stage, const ReadState &read_access) const {
        return 0 != (stage & ~read_access.barriers);
    }
    bool IsReadHazard(VkPipelineStageFlags stage_mask, const ReadState &read_access) const {
        return stage_mask != (stage_mask & read_access.barriers);
    }
    // With reads, each must be "safe" relative to its prior write, so we need only
    // save the most recent write operation (as anything *transitively* unsafe would already
    // be included).
    SyncStageAccessFlags write_barriers;          // union of applicable barrier masks since last write
    VkPipelineStageFlags write_dependency_chain;  // initially zero, but accumulating the dstStages of barriers if they chain.
    uint32_t last_read_count;
    VkPipelineStageFlags last_read_stages;

    ResourceUsageTag write_tag;

    std::array<ReadState, 8 * sizeof(VkPipelineStageFlags)> last_reads;
    SyncStageAccessFlagBits last_write;  // only the most recent write
};
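
// Illustrative usage sketch (not part of the build): the per-resource hazard check pattern.  The SYNC_*
// stage/access indices are assumed to come from the generated synchronization_validation_types.h and are
// shown here only as plausible example values.
#if 0
inline void ExampleReadAfterWriteCheck(ResourceAccessState &access, const ResourceUsageTag &tag) {
    access.Update(SYNC_TRANSFER_TRANSFER_WRITE, tag);  // record a transfer write at this tag
    HazardResult hazard = access.DetectHazard(SYNC_TRANSFER_TRANSFER_READ);
    assert(hazard.hazard == READ_AFTER_WRITE);  // no intervening barrier -> read-after-write hazard
}
#endif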

using ResourceAccessRangeMap = sparse_container::range_map<uint64_t, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;
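
// Illustrative sketch (not part of the build): resource addresses are keyed as half-open [begin, end)
// offset ranges.  Assumes the underlying range type exposes a (begin, end) constructor.
#if 0
inline ResourceAccessRange ExampleMakeRange(VkDeviceSize offset, VkDeviceSize size) {
    return ResourceAccessRange(offset, offset + size);
}
#endif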

class AccessTrackerContext;
// This class owns none of the objects pointed to.
class AccessTracker {
  public:
    AccessTracker(AccessTrackerContext *context) : accesses_() {}
    ResourceAccessRangeMap &GetCurrentAccessMap() { return accesses_; }
    const ResourceAccessRangeMap &GetCurrentAccessMap() const { return accesses_; }
    void UpdateAccessState(SyncStageAccessIndex current_usage, const ResourceAccessRange &range, const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);

  private:
    ResourceAccessRangeMap accesses_;
// TODO: Cache the track back tree to save on repeated map lookups
#if 0
    struct TrackBack {
        const VkSubpassDependency2 *barrier;
        AccessTracker *tracker;
    };
    std::vector<TrackBack> prev_;
    std::vector<AccessTracker *> async_;
    TrackBack external_;
    AccessTrackerContext *context_;
#endif
};

class AccessTrackerContext {
  protected:
    // TODO -- hide the details of the implementation..
    template <typename Map, typename Key>
    static typename Map::mapped_type *GetImpl(Map *map, Key key, AccessTrackerContext *context) {
        auto find_it = map->find(key);
        if (find_it == map->end()) {
            if (!context) return nullptr;
            auto insert_pair = map->insert(std::make_pair(key, typename Map::mapped_type(context)));
            find_it = insert_pair.first;
        }
        return &find_it->second;
    }

    template <typename Map, typename Key>
    static const typename Map::mapped_type *GetConstImpl(const Map *map, Key key) {
        auto find_it = map->find(key);
        if (find_it == map->cend()) {
            return nullptr;
        }
        return &find_it->second;
    }

  public:
    using AccessTrackerMap = std::unordered_map<VulkanTypedHandle, AccessTracker>;
    struct TrackBack {
        SyncBarrier barrier;
        AccessTrackerContext *context;
        TrackBack(AccessTrackerContext *context_, VkQueueFlags queue_flags_, const VkSubpassDependency2 &subpass_barrier_)
            : barrier(queue_flags_, subpass_barrier_), context(context_) {}
        TrackBack &operator=(const TrackBack &) = default;
        TrackBack() = default;
    };

    AccessTracker *GetAccessTracker(const VulkanTypedHandle &handle) { return GetImpl(&access_tracker_map_, handle, this); }
    AccessTracker *GetAccessTrackerNoInsert(const VulkanTypedHandle &handle) {
        return GetImpl(&access_tracker_map_, handle, nullptr);
    }
    const AccessTracker *GetAccessTracker(const VulkanTypedHandle &handle) const {
        return GetConstImpl(&access_tracker_map_, handle);
    }
    HazardResult DetectHazard(const VulkanTypedHandle &handle, SyncStageAccessIndex usage_index,
                              const ResourceAccessRange &range) const;
    HazardResult DetectBarrierHazard(const VulkanTypedHandle &handle, SyncStageAccessIndex current_usage,
                                     VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                                     const ResourceAccessRange &range) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;

    const AccessTrackerMap &GetAccessTrackerMap() const { return access_tracker_map_; }
    AccessTrackerMap &GetAccessTrackerMap() { return access_tracker_map_; }

    const TrackBack &GetDstExternalTrackBack() const { return dst_external_; }
    void Reset() {
        access_tracker_map_.clear();
        prev_.clear();
        async_.clear();
        src_external_ = TrackBack();
    }
    // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead
    // Would need to add a "hint" overload to parallel_iterator::invalidate_[AB] call, if so.
    void ResolvePreviousAccess(const VulkanTypedHandle &handle, const ResourceAccessRange &range,
                               ResourceAccessRangeMap *descent_map, const ResourceAccessState *infill_state) const;
    void ResolveTrackBack(const VulkanTypedHandle &handle, const ResourceAccessRange &range,
                          const AccessTrackerContext::TrackBack &track_back, ResourceAccessRangeMap *descent_map,
                          const ResourceAccessState *infill_state, bool recur_to_infill = true) const;
    void UpdateAccessState(const VulkanTypedHandle &handle, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);

    AccessTrackerContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
                         const std::vector<AccessTrackerContext> &contexts, AccessTrackerContext *external_context);

    AccessTrackerContext() { Reset(); }

  private:
    template <typename Detector>
    HazardResult DetectHazard(const VulkanTypedHandle &handle, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectAsyncHazard(const VulkanTypedHandle &handle, const Detector &detector,
                                   const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectPreviousHazard(const VulkanTypedHandle &handle, const Detector &detector,
                                      const ResourceAccessRange &range) const;

    AccessTrackerMap access_tracker_map_;

    std::vector<TrackBack> prev_;
    std::vector<AccessTrackerContext *> async_;
    TrackBack src_external_;
    TrackBack dst_external_;
};
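
// Illustrative usage sketch (not part of the build): how a per-region check might query the context.
// Assumes VulkanTypedHandle is constructible from (VkBuffer, kVulkanObjectTypeBuffer) and that the SYNC_*
// index comes from the generated types header.
#if 0
inline bool ExampleCopySrcHasHazard(const AccessTrackerContext &context, VkBuffer src_buffer,
                                    const VkBufferCopy &region) {
    const ResourceAccessRange range(region.srcOffset, region.srcOffset + region.size);
    const auto hazard = context.DetectHazard(VulkanTypedHandle(src_buffer, kVulkanObjectTypeBuffer),
                                             SYNC_TRANSFER_TRANSFER_READ, range);
    return hazard.hazard != NONE;
}
#endif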

struct RenderPassAccessContext {
    uint32_t current_subpass_;
    std::vector<AccessTrackerContext> subpass_contexts_;
    const std::vector<SubpassDependencyGraphNode> *dependencies_;
    RenderPassAccessContext(VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> *dependencies,
                            AccessTrackerContext *external_context)
        : current_subpass_(0), dependencies_(dependencies) {
        if (dependencies_) {
            subpass_contexts_.emplace_back(0, queue_flags, *dependencies_, subpass_contexts_, external_context);
        }
    }
    void NextSubpass(VkQueueFlags queue_flags, AccessTrackerContext *external_context) {
        current_subpass_++;
        subpass_contexts_.emplace_back(current_subpass_, queue_flags, *dependencies_, subpass_contexts_, external_context);
        assert(subpass_contexts_.size() == (current_subpass_ + 1));
    }
    AccessTrackerContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
    const AccessTrackerContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
};

class CommandBufferAccessContext {
  public:
    CommandBufferAccessContext()
        : render_pass_contexts_(),
          cb_tracker_context_(),
          current_context_(&cb_tracker_context_),
          current_renderpass_context_(),
          cb_state_(),
          queue_flags_() {}
    CommandBufferAccessContext(std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
        : CommandBufferAccessContext() {
        cb_state_ = cb_state;
        queue_flags_ = queue_flags;
    }

    void Reset() {
        cb_tracker_context_.Reset();
        render_pass_contexts_.clear();
        current_context_ = &cb_tracker_context_;
        current_renderpass_context_ = nullptr;
    }

    AccessTrackerContext *GetCurrentAccessContext() { return current_context_; }
    const AccessTrackerContext *GetCurrentAccessContext() const { return current_context_; }
    void BeginRenderPass(const RENDER_PASS_STATE &render_pass);
    void NextRenderPass(const RENDER_PASS_STATE &render_pass);
    void EndRenderPass(const RENDER_PASS_STATE &render_pass);
    CMD_BUFFER_STATE *GetCommandBufferState() { return cb_state_.get(); }
    const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
    VkQueueFlags GetQueueFlags() const { return queue_flags_; }

  private:
    std::vector<RenderPassAccessContext> render_pass_contexts_;
    AccessTrackerContext cb_tracker_context_;
    AccessTrackerContext *current_context_;
    RenderPassAccessContext *current_renderpass_context_;
    std::shared_ptr<CMD_BUFFER_STATE> cb_state_;
    VkQueueFlags queue_flags_;
};
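
// Illustrative usage sketch (not part of the build): the access context a caller sees is expected to follow
// the command buffer's render pass state -- the command-buffer-level context outside a render pass, and a
// per-subpass context between Begin/EndRenderPass.  This reflects the members above, not a verified contract.
#if 0
inline void ExampleRenderPassFlow(CommandBufferAccessContext &cb_context, const RENDER_PASS_STATE &rp_state) {
    AccessTrackerContext *outside = cb_context.GetCurrentAccessContext();
    cb_context.BeginRenderPass(rp_state);
    AccessTrackerContext *subpass = cb_context.GetCurrentAccessContext();  // subpass 0 context
    cb_context.NextRenderPass(rp_state);
    cb_context.EndRenderPass(rp_state);
    (void)outside;
    (void)subpass;
}
#endif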

class SyncValidator : public ValidationStateTracker, public SyncStageAccess {
  public:
    SyncValidator() { container_type = LayerObjectTypeSyncValidation; }
    using StateTracker = ValidationStateTracker;

    using StateTracker::AccessorTraitsTypes;
    ResourceUsageTag tag;  // Find a better tagging scheme...
    std::unordered_map<VkCommandBuffer, std::unique_ptr<CommandBufferAccessContext>> cb_access_state;
    CommandBufferAccessContext *GetAccessContextImpl(VkCommandBuffer command_buffer, bool do_insert) {
        auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            if (!do_insert) return nullptr;
            // If we don't have one, make it.
            auto cb_state = GetShared<CMD_BUFFER_STATE>(command_buffer);
            assert(cb_state.get());
            auto queue_flags = GetQueueFlags(*cb_state);
            std::unique_ptr<CommandBufferAccessContext> context(new CommandBufferAccessContext(cb_state, queue_flags));
            auto insert_pair = cb_access_state.insert(std::make_pair(command_buffer, std::move(context)));
            found_it = insert_pair.first;
        }
        return found_it->second.get();
    }
    CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, true);  // true -> do_insert on not found
    }
    CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, false);  // false -> don't do_insert on not found
    }

    const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const {
        const auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            return nullptr;
        }
        return found_it->second.get();
    }

    void ApplyGlobalBarriers(AccessTrackerContext *context, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                             SyncStageAccessFlags src_stage_scope, SyncStageAccessFlags dst_stage_scope,
                             uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers);
    void ApplyBufferBarriers(AccessTrackerContext *context, VkPipelineStageFlags src_stage_mask,
                             SyncStageAccessFlags src_stage_scope, VkPipelineStageFlags dst_stage_mask,
                             SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count, const VkBufferMemoryBarrier *barriers);
    void ApplyImageBarriers(AccessTrackerContext *context, VkPipelineStageFlags src_stage_mask,
                            SyncStageAccessFlags src_stage_scope, VkPipelineStageFlags dst_stage_mask,
                            SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count, const VkImageMemoryBarrier *barriers);

    void ResetCommandBuffer(VkCommandBuffer command_buffer);
    void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                  const VkSubpassBeginInfo *pSubpassBeginInfo);
    void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                              const VkSubpassEndInfo *pSubpassEndInfo);
    void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);

    void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                    const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result);

    bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                      const VkBufferCopy *pRegions) const;

    void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                    const VkBufferCopy *pRegions);

    bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageCopy *pRegions) const;

    void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions);

    bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                           VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                           uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                           uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                           uint32_t imageMemoryBarrierCount,
                                           const VkImageMemoryBarrier *pImageMemoryBarriers) const;

    void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                         VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                         uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                         uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                         uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers);

    void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
                                          VkResult result);

    void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                          VkSubpassContents contents);
    void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                           const VkSubpassBeginInfo *pSubpassBeginInfo);
    void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                              const VkSubpassBeginInfo *pSubpassBeginInfo);

    void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
    void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                       const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                          const VkSubpassEndInfo *pSubpassEndInfo);

    void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer);
    void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);

    bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                             VkImageLayout dstImageLayout, uint32_t regionCount,
                                             const VkBufferImageCopy *pRegions) const;

    void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                           VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions);

    bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                             VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) const;

    void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                           VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions);

    bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageBlit *pRegions, VkFilter filter) const;

    void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions,
                                   VkFilter filter);
};
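
// Illustrative usage sketch (not part of the build): the shape of the Validate/Record pairs above.  A
// validate intercept looks up the command buffer context without creating one and walks the regions; the
// matching record intercept updates access state under a fresh usage tag.
#if 0
inline bool ExampleValidateShape(const SyncValidator &sync, VkCommandBuffer commandBuffer) {
    bool skip = false;
    const auto *cb_context = sync.GetAccessContext(commandBuffer);
    if (!cb_context) return skip;  // nothing recorded yet, nothing to check
    const auto *access_context = cb_context->GetCurrentAccessContext();
    // ... DetectHazard per region against *access_context, logging and setting skip on a hit ...
    (void)access_context;
    return skip;  // false == do not skip the call, matching the layer convention
}
#endif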