/*
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 */

#pragma once

#include <array>
#include <cassert>
#include <map>
#include <memory>
#include <unordered_map>
#include <vector>
#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"

enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};

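// Illustrative sketch (not part of the API): how the hazard kinds arise. The
// *_AFTER_* cases are ordering hazards within a single dependency chain; the
// *_RACING_* cases are reported for accesses with no ordering at all, e.g.
// between asynchronous subpasses (see DetectAsyncHazard below).
//
//     vkCmdCopyBuffer(cb, a, b, 1, &region);  // write to b
//     vkCmdCopyBuffer(cb, b, c, 1, &region);  // read of b with no intervening
//                                             // barrier -> READ_AFTER_WRITE on b
//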
// Useful utilities for manipulating StageAccess parameters, suitable as a base class to save typing
struct SyncStageAccess {
    static SyncStageAccessFlagBits FlagBit(SyncStageAccessIndex stage_access) {
        return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
    }

    static bool IsRead(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessReadMask); }
    static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }

    static bool IsWrite(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessWriteMask); }
    static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
    static VkPipelineStageFlagBits PipelineStageBit(SyncStageAccessIndex stage_access_index) {
        return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
    }
    static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags stages);
    static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(VkPipelineStageFlags stages, VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(SyncStageAccessFlags stage_scope, VkAccessFlags accesses) {
        return stage_scope & AccessScopeByAccess(accesses);
    }
};

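// Usage sketch (illustrative): expanding a barrier's (stageMask, accessMask)
// pair into the stage/access scope used for hazard checks. The Vulkan flags
// are real; the resulting scope bits depend on the generated tables in
// synchronization_validation_types.h.
//
//     SyncStageAccessFlags src_scope =
//         SyncStageAccess::AccessScope(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT);
//     // src_scope holds only the stage/access bits valid for that pairing,
//     // i.e. the stage scope ANDed with the per-access expansion.
//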
using ResourceUsageTag = uint64_t;  // TODO -- identify a better DWORD or QWORD size UID/Tag for usages causing hazards
struct HazardResult {
    SyncHazard hazard = NONE;
    ResourceUsageTag tag = ResourceUsageTag();
    void Set(SyncHazard hazard_, const ResourceUsageTag &tag_) {
        hazard = hazard_;
        tag = tag_;
    }
};

struct SyncBarrier {
    VkPipelineStageFlags src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    VkPipelineStageFlags dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
    SyncBarrier() = default;
    SyncBarrier &operator=(const SyncBarrier &) = default;
    SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &sub_pass_barrier);
};
using SyncBarrierStack = std::vector<const SyncBarrier *>;

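// Usage sketch (illustrative; the dependency field values are hypothetical):
// converting a subpass dependency into the stage/access scopes tracked here.
//
//     VkSubpassDependency2 dep = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2};
//     dep.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
//     dep.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
//     dep.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
//     dep.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
//     SyncBarrier barrier(VK_QUEUE_GRAPHICS_BIT, dep);  // queue flags restrict the stage scopes
//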
class ResourceAccessState : public SyncStageAccess {
  protected:
    // Multiple read operations can be simultaneously (and independently) synchronized. Given that only the
    // second execution scope creates a dependency chain, we have to track each read, but only up to one per
    // pipeline stage, as a later read from the *same* stage becomes the more recent (and thus the applicable)
    // one for hazard detection.
    struct ReadState {
        VkPipelineStageFlagBits stage;  // The stage of this read
        VkPipelineStageFlags barriers;  // all applicable barriered stages
        ResourceUsageTag tag;
    };

    void ApplyBarrier(const SyncBarrier &barrier);
    static ResourceAccessState ApplyBarrierStack(const ResourceAccessState &that, const SyncBarrierStack &barrier_stack);

  public:
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, SyncBarrierStack *barrier_stack) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;

    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
                                     SyncStageAccessFlags src_access_scope, SyncBarrierStack *barrier_stack) const;
    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_stage_mask,
                                     SyncStageAccessFlags source_scope) const;

    HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index) const;

    void Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag);
    void ApplyExecutionBarrier(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask);
    void ApplyMemoryAccessBarrier(VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_scope,
                                  VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_scope);

    ResourceAccessState()
        : write_barriers(~SyncStageAccessFlags(0)),
          write_dependency_chain(0),
          last_read_count(0),
          last_read_stages(0),
          write_tag(),
          last_write(SyncStageAccessFlagBits(0)) {}  // zero-init last_write so HasWriteOp() is well-defined

    bool HasWriteOp() const { return last_write != 0; }

  private:
    bool IsWriteHazard(SyncStageAccessFlagBits usage) const { return 0 != (usage & ~write_barriers); }
    bool IsReadHazard(VkPipelineStageFlagBits stage, const ReadState &read_access) const {
        return 0 != (stage & ~read_access.barriers);
    }
    bool IsReadHazard(VkPipelineStageFlags stage_mask, const ReadState &read_access) const {
        return stage_mask != (stage_mask & read_access.barriers);
    }
    // With reads, each must be "safe" relative to its prior write, so we need only
    // save the most recent write operation (as anything *transitively* unsafe would
    // already be included).
    SyncStageAccessFlags write_barriers;          // union of applicable barrier masks since last write
    VkPipelineStageFlags write_dependency_chain;  // initially zero, but accumulating the dstStages of barriers if they chain.
    uint32_t last_read_count;
    VkPipelineStageFlags last_read_stages;

    ResourceUsageTag write_tag;

    std::array<ReadState, 8 * sizeof(VkPipelineStageFlags)> last_reads;
    SyncStageAccessFlagBits last_write;  // only the most recent write
};

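// Usage sketch (illustrative; SYNC_TRANSFER_TRANSFER_WRITE/READ are assumed to
// be among the generated SyncStageAccessIndex values in
// synchronization_validation_types.h):
//
//     ResourceAccessState state;
//     state.Update(SYNC_TRANSFER_TRANSFER_WRITE, write_tag);  // record a transfer write
//     HazardResult hazard = state.DetectHazard(SYNC_TRANSFER_TRANSFER_READ);
//     if (hazard.hazard != NONE) { /* READ_AFTER_WRITE: no barrier since the write */ }
//
//     // A matching barrier makes a subsequent read safe:
//     state.ApplyMemoryAccessBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, src_scope,
//                                    VK_PIPELINE_STAGE_TRANSFER_BIT, dst_scope);
//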
using ResourceAccessRangeMap = sparse_container::range_map<uint64_t, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;

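// Usage sketch (illustrative): ranges are spans of byte offsets, treated here
// as half-open [begin, end); assumes the sparse_container range type's
// (begin, end) constructor. `region` would be, e.g., a VkBufferCopy.
//
//     ResourceAccessRange range(region.srcOffset, region.srcOffset + region.size);
//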
class AccessTrackerContext;
// This class owns none of the objects pointed to.
class AccessTracker {
  public:
    AccessTracker(AccessTrackerContext *context) : accesses_() {}
    ResourceAccessRangeMap &GetCurrentAccessMap() { return accesses_; }
    const ResourceAccessRangeMap &GetCurrentAccessMap() const { return accesses_; }
    void UpdateAccessState(SyncStageAccessIndex current_usage, const ResourceAccessRange &range, const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);

  private:
    ResourceAccessRangeMap accesses_;
// TODO: Cache the track back tree to save on repeated map lookups
#if 0
    struct TrackBack {
        const VkSubpassDependency2 *barrier;
        AccessTracker *tracker;
    };
    std::vector<TrackBack> prev_;
    std::vector<AccessTracker *> async_;
    TrackBack external_;
    AccessTrackerContext *context_;
#endif
};

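// Usage sketch (illustrative): an AccessTracker holds the per-range access
// states for one Vulkan handle; the context owns the map of trackers.
//
//     AccessTracker *tracker = context.GetAccessTracker(typed_handle);  // lazily created
//     tracker->UpdateAccessState(SYNC_TRANSFER_TRANSFER_WRITE,
//                                ResourceAccessRange(0, buffer_size), tag);
//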
class AccessTrackerContext {
  protected:
    // TODO -- hide the details of the implementation..
    template <typename Map, typename Key>
    static typename Map::mapped_type *GetImpl(Map *map, Key key, AccessTrackerContext *context) {
        auto find_it = map->find(key);
        if (find_it == map->end()) {
            if (!context) return nullptr;
            auto insert_pair = map->insert(std::make_pair(key, typename Map::mapped_type(context)));
            find_it = insert_pair.first;
        }
        return &find_it->second;
    }

    template <typename Map, typename Key>
    static const typename Map::mapped_type *GetConstImpl(const Map *map, Key key) {
        auto find_it = map->find(key);
        if (find_it == map->cend()) {
            return nullptr;
        }
        return &find_it->second;
    }

  public:
    using AccessTrackerMap = std::unordered_map<VulkanTypedHandle, AccessTracker>;
    struct TrackBack {
        SyncBarrier barrier;
        AccessTrackerContext *context;
        TrackBack(AccessTrackerContext *context_, VkQueueFlags queue_flags_, const VkSubpassDependency2 &subpass_barrier_)
            : barrier(queue_flags_, subpass_barrier_), context(context_) {}
        TrackBack &operator=(const TrackBack &) = default;
        TrackBack() = default;
    };

    AccessTracker *GetAccessTracker(const VulkanTypedHandle &handle) { return GetImpl(&access_tracker_map_, handle, this); }
    AccessTracker *GetAccessTrackerNoInsert(const VulkanTypedHandle &handle) {
        return GetImpl(&access_tracker_map_, handle, nullptr);
    }
    const AccessTracker *GetAccessTracker(const VulkanTypedHandle &handle) const {
        return GetConstImpl(&access_tracker_map_, handle);
    }
    HazardResult DetectHazard(const VulkanTypedHandle &handle, SyncStageAccessIndex usage_index,
                              const ResourceAccessRange &range) const;
    HazardResult DetectBarrierHazard(const VulkanTypedHandle &handle, SyncStageAccessIndex current_usage,
                                     VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                                     const ResourceAccessRange &range) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;

    const AccessTrackerMap &GetAccessTrackerMap() const { return access_tracker_map_; }
    AccessTrackerMap &GetAccessTrackerMap() { return access_tracker_map_; }
    void Reset() {
        access_tracker_map_.clear();
        prev_.clear();
        async_.clear();
        external_ = TrackBack();
    }
    void UpdateAccessState(const VulkanTypedHandle &handle, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);

    AccessTrackerContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
                         const std::vector<AccessTrackerContext> &contexts, AccessTrackerContext *external_context);

    AccessTrackerContext() { Reset(); }

  private:
    template <typename Detector>
    HazardResult DetectHazard(const VulkanTypedHandle &handle, const Detector &detector, const ResourceAccessRange &range,
                              SyncBarrierStack *barrier_stack) const;
    template <typename Detector>
    HazardResult DetectHazardRecur(const VulkanTypedHandle &handle, const Detector &detector, const ResourceAccessRange &range,
                                   SyncBarrierStack *barrier_stack) const;
    template <typename Detector>
    HazardResult DetectHazard(const VulkanTypedHandle &handle, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectAsyncHazard(const VulkanTypedHandle &handle, const Detector &detector,
                                   const ResourceAccessRange &range) const;

    AccessTrackerMap access_tracker_map_;

    std::vector<TrackBack> prev_;
    std::vector<AccessTrackerContext *> async_;
    TrackBack external_;
};

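// Usage sketch (illustrative; assumes VulkanTypedHandle's (handle, type)
// constructor from the state tracker):
//
//     VulkanTypedHandle typed(buffer, kVulkanObjectTypeBuffer);
//     HazardResult hazard = context.DetectHazard(typed, SYNC_TRANSFER_TRANSFER_READ, range);
//     if (hazard.hazard == NONE) {
//         context.UpdateAccessState(typed, SYNC_TRANSFER_TRANSFER_READ, range, tag);
//     }
//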
struct RenderPassAccessContext {
    uint32_t current_subpass_;
    std::vector<AccessTrackerContext> subpass_contexts_;
    const std::vector<SubpassDependencyGraphNode> *dependencies_;
    RenderPassAccessContext(VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> *dependencies,
                            AccessTrackerContext *external_context)
        : current_subpass_(0), dependencies_(dependencies) {
        if (dependencies_) {
            subpass_contexts_.emplace_back(0, queue_flags, *dependencies_, subpass_contexts_, external_context);
        }
    }
    void NextSubpass(VkQueueFlags queue_flags, AccessTrackerContext *external_context) {
        current_subpass_++;
        subpass_contexts_.emplace_back(current_subpass_, queue_flags, *dependencies_, subpass_contexts_, external_context);
        assert(subpass_contexts_.size() == (current_subpass_ + 1));
    }
    AccessTrackerContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
    const AccessTrackerContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
};

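// Usage sketch (illustrative): one AccessTrackerContext per subpass, created
// lazily as the render pass advances.
//
//     // `dependencies` assumed: the render pass's SubpassDependencyGraphNode vector
//     RenderPassAccessContext rp_context(queue_flags, &dependencies, external_context);
//     rp_context.CurrentContext();                             // subpass 0's context
//     rp_context.NextSubpass(queue_flags, external_context);   // creates subpass 1's context
//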
class CommandBufferAccessContext {
  public:
    CommandBufferAccessContext()
        : render_pass_contexts_(),
          cb_tracker_context_(),
          current_context_(&cb_tracker_context_),
          current_renderpass_context_(),
          cb_state_(),
          queue_flags_() {}
    CommandBufferAccessContext(std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
        : CommandBufferAccessContext() {
        cb_state_ = cb_state;
        queue_flags_ = queue_flags;
    }

    void Reset() {
        cb_tracker_context_.Reset();
        render_pass_contexts_.clear();
        current_context_ = &cb_tracker_context_;
        current_renderpass_context_ = nullptr;
    }

    AccessTrackerContext *GetCurrentAccessContext() { return current_context_; }
    const AccessTrackerContext *GetCurrentAccessContext() const { return current_context_; }
    void BeginRenderPass(const RENDER_PASS_STATE &render_pass);
    void NextRenderPass(const RENDER_PASS_STATE &render_pass);
    void EndRenderPass(const RENDER_PASS_STATE &render_pass);
    CMD_BUFFER_STATE *GetCommandBufferState() { return cb_state_.get(); }
    const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
    VkQueueFlags GetQueueFlags() const { return queue_flags_; }

  private:
    std::vector<RenderPassAccessContext> render_pass_contexts_;
    AccessTrackerContext cb_tracker_context_;
    AccessTrackerContext *current_context_;
    RenderPassAccessContext *current_renderpass_context_;
    std::shared_ptr<CMD_BUFFER_STATE> cb_state_;
    VkQueueFlags queue_flags_;
};

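// Usage sketch (illustrative): the current access context is the command
// buffer's own context outside a render pass, and the active subpass context
// inside one.
//
//     cb_access_context.BeginRenderPass(rp_state);  // current context -> subpass 0
//     cb_access_context.NextRenderPass(rp_state);   // current context -> next subpass
//     cb_access_context.EndRenderPass(rp_state);    // back to the command buffer context
//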
class SyncValidator : public ValidationStateTracker, public SyncStageAccess {
  public:
    SyncValidator() { container_type = LayerObjectTypeSyncValidation; }
    using StateTracker = ValidationStateTracker;

    using StateTracker::AccessorTraitsTypes;
    ResourceUsageTag tag = 0;  // Find a better tagging scheme...
    std::unordered_map<VkCommandBuffer, std::unique_ptr<CommandBufferAccessContext>> cb_access_state;
    CommandBufferAccessContext *GetAccessContextImpl(VkCommandBuffer command_buffer, bool do_insert) {
        auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            if (!do_insert) return nullptr;
            // If we don't have one, make it.
            auto cb_state = GetShared<CMD_BUFFER_STATE>(command_buffer);
            assert(cb_state.get());
            auto queue_flags = GetQueueFlags(*cb_state);
            std::unique_ptr<CommandBufferAccessContext> context(new CommandBufferAccessContext(cb_state, queue_flags));
            auto insert_pair = cb_access_state.insert(std::make_pair(command_buffer, std::move(context)));
            found_it = insert_pair.first;
        }
        return found_it->second.get();
    }
    CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, true);  // true -> do_insert on not found
    }
    CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, false);  // false -> don't do_insert on not found
    }

    const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const {
        const auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            return nullptr;
        }
        return found_it->second.get();
    }

    void ApplyGlobalBarriers(AccessTrackerContext *context, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                             SyncStageAccessFlags src_stage_scope, SyncStageAccessFlags dst_stage_scope,
                             uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers);
    void ApplyBufferBarriers(AccessTrackerContext *context, VkPipelineStageFlags src_stage_mask,
                             SyncStageAccessFlags src_stage_scope, VkPipelineStageFlags dst_stage_mask,
                             SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count, const VkBufferMemoryBarrier *barriers);
    void ApplyImageBarriers(AccessTrackerContext *context, VkPipelineStageFlags src_stage_mask,
                            SyncStageAccessFlags src_stage_scope, VkPipelineStageFlags dst_stage_mask,
                            SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count, const VkImageMemoryBarrier *barriers);

    void ResetCommandBuffer(VkCommandBuffer command_buffer);
    void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                  const VkSubpassBeginInfo *pSubpassBeginInfo);
    void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                              const VkSubpassEndInfo *pSubpassEndInfo);
    void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);

    void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                    const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result);

    bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                      const VkBufferCopy *pRegions) const;

    void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                    const VkBufferCopy *pRegions);

    bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageCopy *pRegions) const;

    void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions);

    bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                           VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                           uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                           uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                           uint32_t imageMemoryBarrierCount,
                                           const VkImageMemoryBarrier *pImageMemoryBarriers) const;

    void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                         VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                         uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                         uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                         uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers);

    void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
                                          VkResult result);

    void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                          VkSubpassContents contents);
    void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                           const VkSubpassBeginInfo *pSubpassBeginInfo);
    void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                              const VkSubpassBeginInfo *pSubpassBeginInfo);

    void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
    void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                       const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                          const VkSubpassEndInfo *pSubpassEndInfo);

    void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer);
    void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);
};