/*
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 */

#pragma once

#include <map>
#include <memory>
#include <unordered_map>
#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"

enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};

// Useful utilities for manipulating StageAccess parameters, suitable as a base class to save typing
struct SyncStageAccess {
    static SyncStageAccessFlagBits FlagBit(SyncStageAccessIndex stage_access) {
        return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
    }

    static bool IsRead(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessReadMask); }
    static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }

    static bool IsWrite(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessWriteMask); }
    static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
    static VkPipelineStageFlagBits PipelineStageBit(SyncStageAccessIndex stage_access_index) {
        return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
    }
    static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags stages);
    static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(VkPipelineStageFlags stages, VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(SyncStageAccessFlags stage_scope, VkAccessFlags accesses) {
        return stage_scope & AccessScopeByAccess(accesses);
    }
};
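
// Illustrative sketch (not part of this header's interface): how a caller might compose an access
// scope from a pipeline stage mask and an access mask using the helpers above. The specific
// stage/access values are arbitrary examples.
//
//     VkPipelineStageFlags stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
//     VkAccessFlags accesses = VK_ACCESS_TRANSFER_WRITE_BIT;
//     SyncStageAccessFlags stage_scope = SyncStageAccess::AccessScopeByStage(stages);
//     SyncStageAccessFlags scope = SyncStageAccess::AccessScope(stage_scope, accesses);
//     // 'scope' now holds only the stage/access bits present in both the stage and access masks.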

struct ResourceUsageTag {
    uint64_t index;
    ResourceUsageTag &operator++() {
        index++;
        return *this;
    }
    bool IsBefore(const ResourceUsageTag &rhs) const { return index < rhs.index; }
};

struct HazardResult {
    SyncHazard hazard = NONE;
    ResourceUsageTag tag = ResourceUsageTag();
    void Set(SyncHazard hazard_, const ResourceUsageTag &tag_) {
        hazard = hazard_;
        tag = tag_;
    }
};
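
// Illustrative sketch (hypothetical caller, not part of this header): detection routines return a
// default-constructed HazardResult when no hazard is found, so callers test the hazard field and use
// the tag to identify the conflicting prior access. SYNC_TRANSFER_TRANSFER_WRITE is assumed to be one
// of the generated SyncStageAccessIndex values.
//
//     const HazardResult hazard = access_state.DetectHazard(SYNC_TRANSFER_TRANSFER_WRITE);
//     if (hazard.hazard != NONE) {
//         // e.g. WRITE_AFTER_READ or WRITE_AFTER_WRITE; hazard.tag orders it against other accesses.
//     }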

struct SyncBarrier {
    VkPipelineStageFlags src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    VkPipelineStageFlags dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
    SyncBarrier() = default;
    SyncBarrier &operator=(const SyncBarrier &) = default;
    SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &sub_pass_barrier);
};
using SyncBarrierStack = std::vector<const SyncBarrier *>;

class ResourceAccessState : public SyncStageAccess {
  protected:
    // Multiple read operations can be simultaneously (and independently) synchronized,
    // given that only the second execution scope creates a dependency chain. We have to track each,
    // but only up to one per pipeline stage (as another read from the *same* stage becomes the more
    // recent, and applicable, one for hazard detection).
    struct ReadState {
        VkPipelineStageFlagBits stage;  // The stage of this read
        VkPipelineStageFlags barriers;  // all applicable barriered stages
        ResourceUsageTag tag;
    };

    static ResourceAccessState ApplyBarrierStack(const ResourceAccessState &that, const SyncBarrierStack &barrier_stack);

  public:
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, SyncBarrierStack *barrier_stack) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;

    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
                                     SyncStageAccessFlags src_access_scope, SyncBarrierStack *barrier_stack) const;
    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_stage_mask,
                                     SyncStageAccessFlags source_scope) const;

    HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index) const;

    void Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag);
    void Resolve(const ResourceAccessState &other);
    void ApplyBarrier(const SyncBarrier &barrier);
    void ApplyExecutionBarrier(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask);
    void ApplyMemoryAccessBarrier(VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_scope,
                                  VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_scope);

    ResourceAccessState()
        : write_barriers(~SyncStageAccessFlags(0)), write_dependency_chain(0), last_read_count(0), last_read_stages(0) {}

    bool HasWriteOp() const { return last_write != 0; }

  private:
    bool IsWriteHazard(SyncStageAccessFlagBits usage) const { return 0 != (usage & ~write_barriers); }
    bool IsReadHazard(VkPipelineStageFlagBits stage, const ReadState &read_access) const {
        return 0 != (stage & ~read_access.barriers);
    }
    bool IsReadHazard(VkPipelineStageFlags stage_mask, const ReadState &read_access) const {
        return stage_mask != (stage_mask & read_access.barriers);
    }
    // With reads, each must be "safe" relative to its prior write, so we need only
    // save the most recent write operation (as anything *transitively* unsafe would already
    // be included).
    SyncStageAccessFlags write_barriers;          // union of applicable barrier masks since last write
    VkPipelineStageFlags write_dependency_chain;  // initially zero, but accumulating the dstStages of barriers if they chain.
    uint32_t last_read_count;
    VkPipelineStageFlags last_read_stages;

    ResourceUsageTag write_tag;

    std::array<ReadState, 8 * sizeof(VkPipelineStageFlags)> last_reads;
    SyncStageAccessFlagBits last_write;  // only the most recent write
};
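
// Illustrative sketch (hypothetical caller, not part of this header): the intended per-range pattern
// is detect-then-update -- check the proposed usage against the recorded state, then record it with
// the current tag, applying barriers between accesses as they are encountered. The stage/access
// indices and the barrier scope variables below are assumed example values.
//
//     ResourceAccessState state;
//     state.Update(SYNC_TRANSFER_TRANSFER_WRITE, tag);                        // record a copy-dst write
//     HazardResult hazard = state.DetectHazard(SYNC_TRANSFER_TRANSFER_READ);  // READ_AFTER_WRITE unless barriered
//     state.ApplyMemoryAccessBarrier(src_stages, src_scope, dst_stages, dst_scope);
//     hazard = state.DetectHazard(SYNC_TRANSFER_TRANSFER_READ);               // NONE if the barrier covers the write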

using ResourceAccessRangeMap = sparse_container::range_map<uint64_t, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;

class AccessTrackerContext;
// This class owns none of the objects pointed to.
class AccessTracker {
  public:
    AccessTracker(AccessTrackerContext *context) : accesses_() {}
    ResourceAccessRangeMap &GetCurrentAccessMap() { return accesses_; }
    const ResourceAccessRangeMap &GetCurrentAccessMap() const { return accesses_; }
    void UpdateAccessState(SyncStageAccessIndex current_usage, const ResourceAccessRange &range, const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);

  private:
    ResourceAccessRangeMap accesses_;
// TODO: Cache the track back tree to save on repeated map lookups
#if 0
    struct TrackBack {
        const VkSubpassDependency2 *barrier;
        AccessTracker *tracker;
    };
    std::vector<TrackBack> prev_;
    std::vector<AccessTracker *> async_;
    TrackBack external_;
    AccessTrackerContext *context_;
#endif
};

class AccessTrackerContext {
  protected:
    // TODO -- hide the details of the implementation..
    template <typename Map, typename Key>
    static typename Map::mapped_type *GetImpl(Map *map, Key key, AccessTrackerContext *context) {
        auto find_it = map->find(key);
        if (find_it == map->end()) {
            if (!context) return nullptr;
            auto insert_pair = map->insert(std::make_pair(key, typename Map::mapped_type(context)));
            find_it = insert_pair.first;
        }
        return &find_it->second;
    }

    template <typename Map, typename Key>
    static const typename Map::mapped_type *GetConstImpl(const Map *map, Key key) {
        auto find_it = map->find(key);
        if (find_it == map->cend()) {
            return nullptr;
        }
        return &find_it->second;
    }

  public:
    using AccessTrackerMap = std::unordered_map<VulkanTypedHandle, AccessTracker>;
    struct TrackBack {
        SyncBarrier barrier;
        AccessTrackerContext *context;
        TrackBack(AccessTrackerContext *context_, VkQueueFlags queue_flags_, const VkSubpassDependency2 &subpass_barrier_)
            : barrier(queue_flags_, subpass_barrier_), context(context_) {}
        TrackBack &operator=(const TrackBack &) = default;
        TrackBack() = default;
    };

    AccessTracker *GetAccessTracker(const VulkanTypedHandle &handle) { return GetImpl(&access_tracker_map_, handle, this); }
    AccessTracker *GetAccessTrackerNoInsert(const VulkanTypedHandle &handle) {
        return GetImpl(&access_tracker_map_, handle, nullptr);
    }
    const AccessTracker *GetAccessTracker(const VulkanTypedHandle &handle) const {
        return GetConstImpl(&access_tracker_map_, handle);
    }
    HazardResult DetectHazard(const VulkanTypedHandle &handle, SyncStageAccessIndex usage_index,
                              const ResourceAccessRange &range) const;
    HazardResult DetectBarrierHazard(const VulkanTypedHandle &handle, SyncStageAccessIndex current_usage,
                                     VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                                     const ResourceAccessRange &range) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;

    const AccessTrackerMap &GetAccessTrackerMap() const { return access_tracker_map_; }
    AccessTrackerMap &GetAccessTrackerMap() { return access_tracker_map_; }
    void Reset() {
        access_tracker_map_.clear();
        prev_.clear();
        async_.clear();
        external_ = TrackBack();
    }
    // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead
    // Would need to add a "hint" overload to parallel_iterator::invalidate_[AB] call, if so.
    void ResolvePreviousAccess(const VulkanTypedHandle &handle, const ResourceAccessRange &range,
                               ResourceAccessRangeMap *descent_map, const ResourceAccessState *infill_state) const;
    void UpdateAccessState(const VulkanTypedHandle &handle, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);

    AccessTrackerContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
                         const std::vector<AccessTrackerContext> &contexts, AccessTrackerContext *external_context);

    AccessTrackerContext() { Reset(); }

  private:
    template <typename Detector>
    HazardResult DetectHazard(const VulkanTypedHandle &handle, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectAsyncHazard(const VulkanTypedHandle &handle, const Detector &detector,
                                   const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectPreviousHazard(const VulkanTypedHandle &handle, const Detector &detector,
                                      const ResourceAccessRange &range) const;
    void ResolveTrackBack(const VulkanTypedHandle &handle, const ResourceAccessRange &range,
                          const AccessTrackerContext::TrackBack &track_back, ResourceAccessRangeMap *descent_map,
                          const ResourceAccessState *infill_state) const;

    AccessTrackerMap access_tracker_map_;

    std::vector<TrackBack> prev_;
    std::vector<AccessTrackerContext *> async_;
    TrackBack external_;
};
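
// Illustrative sketch (hypothetical caller, not part of this header): per-resource hazard checks go
// through the context so that subpass "track back" chains and asynchronous subpasses are consulted as
// well. The buffer handle, byte range, and tag below are example values assumed for the sketch.
//
//     const VulkanTypedHandle handle(buffer, kVulkanObjectTypeBuffer);
//     const ResourceAccessRange range(offset, offset + size);
//     HazardResult hazard = context->DetectHazard(handle, SYNC_TRANSFER_TRANSFER_READ, range);
//     if (hazard.hazard == NONE) {
//         context->UpdateAccessState(handle, SYNC_TRANSFER_TRANSFER_READ, range, tag);
//     }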

struct RenderPassAccessContext {
    uint32_t current_subpass_;
    std::vector<AccessTrackerContext> subpass_contexts_;
    const std::vector<SubpassDependencyGraphNode> *dependencies_;
    RenderPassAccessContext(VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> *dependencies,
                            AccessTrackerContext *external_context)
        : current_subpass_(0), dependencies_(dependencies) {
        if (dependencies_) {
            subpass_contexts_.emplace_back(0, queue_flags, *dependencies_, subpass_contexts_, external_context);
        }
    }
    void NextSubpass(VkQueueFlags queue_flags, AccessTrackerContext *external_context) {
        current_subpass_++;
        subpass_contexts_.emplace_back(current_subpass_, queue_flags, *dependencies_, subpass_contexts_, external_context);
        assert(subpass_contexts_.size() == (current_subpass_ + 1));
    }
    AccessTrackerContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
    const AccessTrackerContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
};
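
// Illustrative sketch (not part of this header): each subpass gets its own AccessTrackerContext,
// built against the render pass dependency graph and the previously created subpass contexts. The
// dependency-graph variable shown here is an assumption for the example.
//
//     RenderPassAccessContext rp_context(queue_flags, &subpass_dependency_graph, external_context);
//     // ... validate/record subpass 0 against rp_context.CurrentContext() ...
//     rp_context.NextSubpass(queue_flags, external_context);  // now tracking subpass 1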

class CommandBufferAccessContext {
  public:
    CommandBufferAccessContext()
        : render_pass_contexts_(),
          cb_tracker_context_(),
          current_context_(&cb_tracker_context_),
          current_renderpass_context_(),
          cb_state_(),
          queue_flags_() {}
    CommandBufferAccessContext(std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
        : CommandBufferAccessContext() {
        cb_state_ = cb_state;
        queue_flags_ = queue_flags;
    }

    void Reset() {
        cb_tracker_context_.Reset();
        render_pass_contexts_.clear();
        current_context_ = &cb_tracker_context_;
        current_renderpass_context_ = nullptr;
    }

    AccessTrackerContext *GetCurrentAccessContext() { return current_context_; }
    const AccessTrackerContext *GetCurrentAccessContext() const { return current_context_; }
    void BeginRenderPass(const RENDER_PASS_STATE &render_pass);
    void NextRenderPass(const RENDER_PASS_STATE &render_pass);
    void EndRenderPass(const RENDER_PASS_STATE &render_pass);
    CMD_BUFFER_STATE *GetCommandBufferState() { return cb_state_.get(); }
    const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
    VkQueueFlags GetQueueFlags() const { return queue_flags_; }

  private:
    std::vector<RenderPassAccessContext> render_pass_contexts_;
    AccessTrackerContext cb_tracker_context_;
    AccessTrackerContext *current_context_;
    RenderPassAccessContext *current_renderpass_context_;
    std::shared_ptr<CMD_BUFFER_STATE> cb_state_;
    VkQueueFlags queue_flags_;
};

class SyncValidator : public ValidationStateTracker, public SyncStageAccess {
  public:
    SyncValidator() { container_type = LayerObjectTypeSyncValidation; }
    using StateTracker = ValidationStateTracker;

    using StateTracker::AccessorTraitsTypes;
    ResourceUsageTag tag;  // Find a better tagging scheme...
    std::unordered_map<VkCommandBuffer, std::unique_ptr<CommandBufferAccessContext>> cb_access_state;
    CommandBufferAccessContext *GetAccessContextImpl(VkCommandBuffer command_buffer, bool do_insert) {
        auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            if (!do_insert) return nullptr;
            // If we don't have one, make it.
            auto cb_state = GetShared<CMD_BUFFER_STATE>(command_buffer);
            assert(cb_state.get());
            auto queue_flags = GetQueueFlags(*cb_state);
            std::unique_ptr<CommandBufferAccessContext> context(new CommandBufferAccessContext(cb_state, queue_flags));
            auto insert_pair = cb_access_state.insert(std::make_pair(command_buffer, std::move(context)));
            found_it = insert_pair.first;
        }
        return found_it->second.get();
    }
    CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, true);  // true -> do_insert on not found
    }
    CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, false);  // false -> don't do_insert on not found
    }

    const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const {
        const auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            return nullptr;
        }
        return found_it->second.get();
    }
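
    // Illustrative sketch (not part of the class interface): the typical shape of a PreCallValidate*
    // hook is to look up the command buffer's access context, then run hazard detection against its
    // current AccessTrackerContext; the copy/blit/barrier entry points below follow this pattern.
    // The 'skip' variable is an assumed local in the hook.
    //
    //     const auto *cb_context = GetAccessContext(commandBuffer);
    //     if (!cb_context) return skip;
    //     const auto *context = cb_context->GetCurrentAccessContext();
    //     // ... DetectHazard() per buffer/image region, reporting a message when one is found ...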

    void ApplyGlobalBarriers(AccessTrackerContext *context, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                             SyncStageAccessFlags src_stage_scope, SyncStageAccessFlags dst_stage_scope,
                             uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers);
    void ApplyBufferBarriers(AccessTrackerContext *context, VkPipelineStageFlags src_stage_mask,
                             SyncStageAccessFlags src_stage_scope, VkPipelineStageFlags dst_stage_mask,
                             SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count, const VkBufferMemoryBarrier *barriers);
    void ApplyImageBarriers(AccessTrackerContext *context, VkPipelineStageFlags src_stage_mask,
                            SyncStageAccessFlags src_stage_scope, VkPipelineStageFlags dst_stage_mask,
                            SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count, const VkImageMemoryBarrier *barriers);

    void ResetCommandBuffer(VkCommandBuffer command_buffer);
    void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                  const VkSubpassBeginInfo *pSubpassBeginInfo);
    void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                              const VkSubpassEndInfo *pSubpassEndInfo);
    void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);

    void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                    const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result);

    bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                      const VkBufferCopy *pRegions) const;

    void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                    const VkBufferCopy *pRegions);

    bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageCopy *pRegions) const;

    void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions);

    bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                           VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                           uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                           uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                           uint32_t imageMemoryBarrierCount,
                                           const VkImageMemoryBarrier *pImageMemoryBarriers) const;

    void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                         VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                         uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                         uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                         uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers);

    void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
                                          VkResult result);

    void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                          VkSubpassContents contents);
    void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                           const VkSubpassBeginInfo *pSubpassBeginInfo);
    void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                              const VkSubpassBeginInfo *pSubpassBeginInfo);

    void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
    void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                       const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                          const VkSubpassEndInfo *pSubpassEndInfo);

    void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer);
    void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);
    bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                             VkImageLayout dstImageLayout, uint32_t regionCount,
                                             const VkBufferImageCopy *pRegions) const;

    void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                           VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions);

    bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                             VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) const;

    void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                           VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions);

    bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageBlit *pRegions, VkFilter filter) const;

    void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions,
                                   VkFilter filter);
};