/*
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 */

#pragma once

#include <limits>
#include <map>
#include <memory>
#include <unordered_map>
#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"

class SyncValidator;
class ResourceAccessState;

enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};
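
// Illustrative note (assumed summary, not exhaustive): the *_AFTER_* hazards describe same-queue ordering
// violations -- e.g. reading a buffer that a prior vkCmdCopyBuffer wrote, with no intervening barrier, is a
// READ_AFTER_WRITE hazard -- while the *_RACING_* hazards describe conflicting accesses with no ordering at
// all, such as accesses made from asynchronous subpasses that have no dependency path between them.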

// Useful Utilities for manipulating StageAccess parameters, suitable as base class to save typing
struct SyncStageAccess {
    static inline SyncStageAccessFlagBits FlagBit(SyncStageAccessIndex stage_access) {
        return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
    }
    static inline SyncStageAccessFlags Flags(SyncStageAccessIndex stage_access) {
        return static_cast<SyncStageAccessFlags>(FlagBit(stage_access));
    }

    static bool IsRead(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessReadMask); }
    static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }

    static bool IsWrite(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessWriteMask); }
    static bool HasWrite(SyncStageAccessFlags stage_access_mask) { return 0 != (stage_access_mask & syncStageAccessWriteMask); }
    static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
    static VkPipelineStageFlagBits PipelineStageBit(SyncStageAccessIndex stage_access_index) {
        return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
    }
    static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags stages);
    static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(VkPipelineStageFlags stages, VkAccessFlags access);
    static SyncStageAccessFlags AccessScope(SyncStageAccessFlags stage_scope, VkAccessFlags accesses) {
        return stage_scope & AccessScopeByAccess(accesses);
    }
};
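
// Illustrative sketch (assumed usage, not called by the validator itself): composing the source half of a
// barrier's scope from a pipeline stage mask and an access mask. AccessScope() intersects the access-derived
// bits with the stage scope, so access bits impossible for the given stages drop out.
//
//     // e.g. for srcStageMask = TRANSFER, srcAccessMask = TRANSFER_WRITE:
//     // SyncStageAccessFlags src_access_scope =
//     //     SyncStageAccess::AccessScope(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT);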

struct ResourceUsageTag {
    uint64_t index;
    CMD_TYPE command;
    const static uint64_t kMaxIndex = std::numeric_limits<uint64_t>::max();
    ResourceUsageTag &operator++() {
        index++;
        return *this;
    }
    bool IsBefore(const ResourceUsageTag &rhs) const { return index < rhs.index; }
    bool operator==(const ResourceUsageTag &rhs) const { return (index == rhs.index); }
    bool operator!=(const ResourceUsageTag &rhs) const { return !(*this == rhs); }
    ResourceUsageTag() : index(0), command(CMD_NONE) {}
    ResourceUsageTag(uint64_t index_, CMD_TYPE command_) : index(index_), command(command_) {}
};

struct HazardResult {
    std::unique_ptr<const ResourceAccessState> access_state;
    SyncStageAccessIndex usage_index = std::numeric_limits<SyncStageAccessIndex>::max();
    SyncHazard hazard = NONE;
    SyncStageAccessFlags prior_access = 0U;  // TODO -- change to a NONE enum in ...Bits
    ResourceUsageTag tag = ResourceUsageTag();
    void Set(const ResourceAccessState *access_state_, SyncStageAccessIndex usage_index_, SyncHazard hazard_,
             SyncStageAccessFlags prior_, const ResourceUsageTag &tag_);
};

struct SyncBarrier {
    VkPipelineStageFlags src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    VkPipelineStageFlags dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
    SyncBarrier() = default;
    SyncBarrier &operator=(const SyncBarrier &) = default;
    SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &sub_pass_barrier);
    void Merge(const SyncBarrier &other) {
        src_exec_scope |= other.src_exec_scope;
        src_access_scope |= other.src_access_scope;
        dst_exec_scope |= other.dst_exec_scope;
        dst_access_scope |= other.dst_access_scope;
    }
    SyncBarrier(VkPipelineStageFlags src_exec_scope_, SyncStageAccessFlags src_access_scope_, VkPipelineStageFlags dst_exec_scope_,
                SyncStageAccessFlags dst_access_scope_)
        : src_exec_scope(src_exec_scope_),
          src_access_scope(src_access_scope_),
          dst_exec_scope(dst_exec_scope_),
          dst_access_scope(dst_access_scope_) {}
};
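
// Illustrative sketch (assumed mapping, mirroring the VkSubpassDependency2 constructor above): the source
// half of a SyncBarrier pairs an execution scope derived from srcStageMask with srcAccessMask restricted to
// those stages, and likewise for the destination half. For a transfer-write -> fragment-shader-read
// dependency, a simplified construction using the four-argument constructor might look like:
//
//     // SyncBarrier barrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
//     //                     SyncStageAccess::AccessScope(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT),
//     //                     VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
//     //                     SyncStageAccess::AccessScope(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_ACCESS_SHADER_READ_BIT));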

// To represent ordering guarantees such as rasterization and store
struct SyncOrderingBarrier {
    VkPipelineStageFlags exec_scope;
    SyncStageAccessFlags access_scope;
    SyncOrderingBarrier() = default;
    SyncOrderingBarrier &operator=(const SyncOrderingBarrier &) = default;
};

class ResourceAccessState : public SyncStageAccess {
  protected:
    // Multiple read operations can be simultaneously (and independently) synchronized. Given that only the
    // second execution scope creates a dependency chain, we have to track each read, but only up to one per
    // pipeline stage (another read from the *same* stage becomes the more recent, and thus the applicable,
    // entry for hazard detection).
    struct ReadState {
        VkPipelineStageFlagBits stage;  // The stage of this read
        SyncStageAccessFlags access;    // TODO: Change to FlagBits when we have a None bit enum
        VkPipelineStageFlags barriers;  // all applicable barriered stages
        ResourceUsageTag tag;
        bool operator==(const ReadState &rhs) const {
            bool same = (stage == rhs.stage) && (access == rhs.access) && (barriers == rhs.barriers) && (tag == rhs.tag);
            return same;
        }
        bool operator!=(const ReadState &rhs) const { return !(*this == rhs); }
    };

  public:
    HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, const SyncOrderingBarrier &ordering) const;

    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags source_exec_scope,
                                     SyncStageAccessFlags source_access_scope) const;
    HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index) const;

    void Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag);
    void Resolve(const ResourceAccessState &other);
    void ApplyBarriers(const std::vector<SyncBarrier> &barriers);
    void ApplyExecutionBarrier(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask);
    void ApplyExecutionBarriers(const std::vector<SyncBarrier> &barriers);
    void ApplyMemoryAccessBarrier(bool multi_dep, VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_scope,
                                  VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_scope);

    ResourceAccessState()
        : write_barriers(~SyncStageAccessFlags(0)),
          write_dependency_chain(0),
          write_tag(),
          last_write(0),
          input_attachment_barriers(kNoAttachmentRead),
          input_attachment_tag(),
          last_read_count(0),
          last_read_stages(0),
          read_execution_barriers(0) {}

    bool HasWriteOp() const { return last_write != 0; }
    bool operator==(const ResourceAccessState &rhs) const {
        bool same = (write_barriers == rhs.write_barriers) && (write_dependency_chain == rhs.write_dependency_chain) &&
                    (last_read_count == rhs.last_read_count) && (last_read_stages == rhs.last_read_stages) &&
                    (write_tag == rhs.write_tag) && (input_attachment_barriers == rhs.input_attachment_barriers) &&
                    ((input_attachment_barriers == kNoAttachmentRead) || input_attachment_tag == rhs.input_attachment_tag) &&
                    (read_execution_barriers == rhs.read_execution_barriers);
        for (uint32_t i = 0; same && i < last_read_count; i++) {
            same = same && (last_reads[i] == rhs.last_reads[i]);
        }
        return same;
    }
    bool operator!=(const ResourceAccessState &rhs) const { return !(*this == rhs); }
    VkPipelineStageFlags GetReadBarriers(SyncStageAccessFlags usage) const;
    SyncStageAccessFlags GetWriteBarriers() const { return write_barriers; }

  private:
    static constexpr VkPipelineStageFlags kNoAttachmentRead = ~VkPipelineStageFlags(0);
    bool IsWriteHazard(SyncStageAccessFlagBits usage) const { return 0 != (usage & ~write_barriers); }
    bool InSourceScopeOrChain(VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope) const {
        return (src_access_scope & last_write) || (write_dependency_chain & src_exec_scope);
    }

    static bool IsReadHazard(VkPipelineStageFlagBits stage, const VkPipelineStageFlags barriers) {
        return 0 != (stage & ~barriers);
    }
    static bool IsReadHazard(VkPipelineStageFlags stage_mask, const VkPipelineStageFlags barriers) {
        return stage_mask != (stage_mask & barriers);
    }

    bool IsReadHazard(VkPipelineStageFlagBits stage, const ReadState &read_access) const {
        return IsReadHazard(stage, read_access.barriers);
    }
    bool IsReadHazard(VkPipelineStageFlags stage_mask, const ReadState &read_access) const {
        return IsReadHazard(stage_mask, read_access.barriers);
    }

    // TODO: Add a NONE (zero) enum to SyncStageAccessFlagBits for input_attachment_read and last_write

    // With reads, each must be "safe" relative to its prior write, so we need only save the most recent
    // write operation (as anything *transitively* unsafe would already be included).
    SyncStageAccessFlags write_barriers;          // union of applicable barrier masks since last write
    VkPipelineStageFlags write_dependency_chain;  // initially zero, but accumulating the dstStages of barriers if they chain.
    ResourceUsageTag write_tag;
    SyncStageAccessFlags last_write;  // only the most recent write

    // This is special as it's a framebuffer-local read from a framebuffer-global pipeline stage.
    // As the only possible state for the input attachment stage/access is SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ_BIT,
    // its presence is encoded in the barriers mask: ~0 denotes no pending input attachment read, zero is the
    // no-barrier state, and any other value reflects the barrier/dependency chain information.
    VkPipelineStageFlags input_attachment_barriers;
    ResourceUsageTag input_attachment_tag;

    uint32_t last_read_count;
    VkPipelineStageFlags last_read_stages;
    VkPipelineStageFlags read_execution_barriers;
    static constexpr size_t kStageCount = 32;  // TODO: The manual count was 28 real stages. Add stage count to codegen
    std::array<ReadState, kStageCount> last_reads;
};
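
// Illustrative sketch of how the private helpers above combine during hazard detection (assumed flow,
// simplified relative to the actual DetectHazard implementations):
//   - a new read is a READ_AFTER_WRITE hazard when its stage/access bit is not covered by write_barriers,
//     i.e. roughly IsWriteHazard(usage_bit) == ((usage_bit & ~write_barriers) != 0);
//   - a new write is a WRITE_AFTER_READ hazard when some prior ReadState's stage is not in that read's
//     applied barriers (IsReadHazard), and a WRITE_AFTER_WRITE hazard against an unbarriered prior write.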

using ResourceAccessRangeMap = sparse_container::range_map<VkDeviceSize, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;
using ResourceRangeMergeIterator = sparse_container::parallel_iterator<ResourceAccessRangeMap, const ResourceAccessRangeMap>;

class AccessContext {
  public:
    enum AddressType : int { kLinearAddress = 0, kIdealizedAddress = 1, kMaxAddressType = 1 };
    enum DetectOptions : uint32_t {
        kDetectPrevious = 1U << 0,
        kDetectAsync = 1U << 1,
        kDetectAll = (kDetectPrevious | kDetectAsync)
    };

    // WIP TODO WIP Multi-dep -- change track back to support barrier vector, not just last.
    struct TrackBack {
        std::vector<SyncBarrier> barriers;
        const AccessContext *context;
        TrackBack(const AccessContext *context_, VkQueueFlags queue_flags_,
                  const std::vector<const VkSubpassDependency2 *> &subpass_dependencies_)
            : barriers(), context(context_) {
            barriers.reserve(subpass_dependencies_.size());
            for (const VkSubpassDependency2 *dependency : subpass_dependencies_) {
                assert(dependency);
                barriers.emplace_back(queue_flags_, *dependency);
            }
        }

        TrackBack &operator=(const TrackBack &) = default;
        TrackBack() = default;
    };

    HazardResult DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;
    template <typename Detector>
    HazardResult DetectHazard(Detector &detector, const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
                              const VkOffset3D &offset, const VkExtent3D &extent, DetectOptions options) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, const SyncOrderingBarrier &ordering,
                              const VkOffset3D &offset, const VkExtent3D &extent) const;
    HazardResult DetectHazard(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
                              const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask = 0U) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          SyncStageAccessFlags src_access_scope, const VkImageSubresourceRange &subresource_range,
                                          DetectOptions options) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          SyncStageAccessFlags src_stage_accesses, const VkImageMemoryBarrier &barrier) const;
    HazardResult DetectSubpassTransitionHazard(const TrackBack &track_back, const IMAGE_VIEW_STATE *attach_view) const;

    const TrackBack &GetDstExternalTrackBack() const { return dst_external_; }
    void Reset() {
        prev_.clear();
        prev_by_subpass_.clear();
        async_.clear();
        src_external_ = TrackBack();
        dst_external_ = TrackBack();
        for (auto &map : access_state_maps_) {
            map.clear();
        }
    }
    // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead
    // Would need to add a "hint" overload to parallel_iterator::invalidate_[AB] call, if so.
    void ResolvePreviousAccess(AddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state) const;
    void ResolvePreviousAccess(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range,
                               AddressType address_type, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state) const;
    void ResolveAccessRange(AddressType type, const ResourceAccessRange &range, const std::vector<SyncBarrier> &barriers,
                            ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
                            bool recur_to_infill = true) const;
    void UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const VkOffset3D &offset,
                           const VkExtent3D &extent, VkImageAspectFlags aspect_mask, const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    void UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                       const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                       const ResourceUsageTag &tag);
    void UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                     const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                     const ResourceUsageTag &tag);

    void ResolveChildContexts(const std::vector<AccessContext> &contexts);

    void ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                           VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags dst_access_scope,
                           const VkImageSubresourceRange &subresource_range);

    void ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                           VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags dst_access_scope,
                           const VkImageSubresourceRange &subresource_range, bool layout_transition, const ResourceUsageTag &tag);
    void ApplyImageBarrier(const IMAGE_STATE &image, const SyncBarrier &barrier, const VkImageSubresourceRange &subresource_range,
                           bool layout_transition, const ResourceUsageTag &tag);

    template <typename Action>
    void UpdateMemoryAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action);
    template <typename Action>
    void UpdateMemoryAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range, const Action action);

    template <typename Action>
    void ApplyGlobalBarriers(const Action &barrier_action);

    static AddressType ImageAddressType(const IMAGE_STATE &image);
    static VkDeviceSize ResourceBaseAddress(const BINDABLE &bindable);

    AccessContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
                  const std::vector<AccessContext> &contexts, const AccessContext *external_context);

    AccessContext() { Reset(); }
    AccessContext(const AccessContext &copy_from) = default;

    ResourceAccessRangeMap &GetAccessStateMap(AddressType type) { return access_state_maps_[type]; }
    const ResourceAccessRangeMap &GetAccessStateMap(AddressType type) const { return access_state_maps_[type]; }
    ResourceAccessRangeMap &GetLinearMap() { return GetAccessStateMap(AddressType::kLinearAddress); }
    const ResourceAccessRangeMap &GetLinearMap() const { return GetAccessStateMap(AddressType::kLinearAddress); }
    ResourceAccessRangeMap &GetIdealizedMap() { return GetAccessStateMap(AddressType::kIdealizedAddress); }
    const ResourceAccessRangeMap &GetIdealizedMap() const { return GetAccessStateMap(AddressType::kIdealizedAddress); }
    const TrackBack *GetTrackBackFromSubpass(uint32_t subpass) const {
        if (subpass == VK_SUBPASS_EXTERNAL) {
            return &src_external_;
        } else {
            assert(subpass < prev_by_subpass_.size());
            return prev_by_subpass_[subpass];
        }
    }

    bool ValidateLayoutTransitions(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                   const VkRect2D &render_area, uint32_t subpass,
                                   const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name) const;
    bool ValidateLoadOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                               uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                               const char *func_name) const;
    bool ValidateStoreOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                const char *func_name) const;
    bool ValidateResolveOperations(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                   const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
                                   uint32_t subpass) const;

  private:
    HazardResult DetectHazard(AddressType type, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
    HazardResult DetectBarrierHazard(AddressType type, SyncStageAccessIndex current_usage, VkPipelineStageFlags src_exec_scope,
                                     SyncStageAccessFlags src_access_scope, const ResourceAccessRange &range,
                                     DetectOptions options) const;

    template <typename Detector>
    HazardResult DetectHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range,
                              DetectOptions options) const;
    template <typename Detector>
    HazardResult DetectAsyncHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectPreviousHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    void UpdateAccessState(AddressType type, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    constexpr static int kAddressTypeCount = AddressType::kMaxAddressType + 1;
    static const std::array<AddressType, kAddressTypeCount> kAddressTypes;
    std::array<ResourceAccessRangeMap, kAddressTypeCount> access_state_maps_;
    std::vector<TrackBack> prev_;
    std::vector<TrackBack *> prev_by_subpass_;
    std::vector<AccessContext *> async_;
    TrackBack src_external_;
    TrackBack dst_external_;
};
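
// Illustrative note (assumption based on the declarations above, not normative): AccessContext keeps one
// ResourceAccessRangeMap per AddressType -- kLinearAddress for resources whose accesses can be keyed by
// VkDeviceSize offsets into bound device memory (e.g. buffers and linearly tiled images), and
// kIdealizedAddress for opaquely (optimally) tiled images, which are tracked in a synthetic address space
// rather than by raw memory offsets; ImageAddressType() selects between the two for a given IMAGE_STATE.
//
//     // e.g. detecting a copy-source hazard on a buffer range (sketch, names assumed):
//     // HazardResult hazard = context.DetectHazard(buffer_state, SYNC_TRANSFER_TRANSFER_READ,
//     //                                            ResourceAccessRange(offset, offset + size));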

class RenderPassAccessContext {
  public:
    RenderPassAccessContext() : rp_state_(nullptr), current_subpass_(0) {}

    bool ValidateDrawSubpassAttachment(const SyncValidator &sync_state, const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area,
                                       const char *func_name) const;
    void RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area, const ResourceUsageTag &tag);
    bool ValidateNextSubpass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *command_name) const;
    bool ValidateEndRenderPass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *func_name) const;
    bool ValidateFinalSubpassLayoutTransitions(const SyncValidator &sync_state, const VkRect2D &render_area,
                                               const char *func_name) const;

    void RecordLayoutTransitions(const ResourceUsageTag &tag);
    void RecordLoadOperations(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordBeginRenderPass(const SyncValidator &state, const CMD_BUFFER_STATE &cb_state, const AccessContext *external_context,
                               VkQueueFlags queue_flags, const ResourceUsageTag &tag);
    void RecordNextSubpass(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordEndRenderPass(AccessContext *external_context, const VkRect2D &render_area, const ResourceUsageTag &tag);

    AccessContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
    const AccessContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
    const std::vector<AccessContext> &GetContexts() const { return subpass_contexts_; }
    uint32_t GetCurrentSubpass() const { return current_subpass_; }
    const RENDER_PASS_STATE *GetRenderPassState() const { return rp_state_; }
    AccessContext *CreateStoreResolveProxy(const VkRect2D &render_area) const;

  private:
    const RENDER_PASS_STATE *rp_state_;
    uint32_t current_subpass_;
    std::vector<AccessContext> subpass_contexts_;
    std::vector<const IMAGE_VIEW_STATE *> attachment_views_;
};

class CommandBufferAccessContext {
  public:
    CommandBufferAccessContext()
        : command_number_(0),
          reset_count_(0),
          render_pass_contexts_(),
          cb_access_context_(),
          current_context_(&cb_access_context_),
          current_renderpass_context_(),
          cb_state_(),
          queue_flags_() {}
    CommandBufferAccessContext(SyncValidator &sync_validator, std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
        : CommandBufferAccessContext() {
        cb_state_ = cb_state;
        sync_state_ = &sync_validator;
        queue_flags_ = queue_flags;
    }

    void Reset() {
        command_number_ = 0;
        reset_count_++;
        cb_access_context_.Reset();
        render_pass_contexts_.clear();
        current_context_ = &cb_access_context_;
        current_renderpass_context_ = nullptr;
    }

    AccessContext *GetCurrentAccessContext() { return current_context_; }
    const AccessContext *GetCurrentAccessContext() const { return current_context_; }
    void RecordBeginRenderPass(const ResourceUsageTag &tag);
    bool ValidateBeginRenderPass(const RENDER_PASS_STATE &render_pass, const VkRenderPassBeginInfo *pRenderPassBegin,
                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const char *func_name) const;
    bool ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const char *func_name) const;
    void RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const ResourceUsageTag &tag);
    bool ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const char *func_name) const;
    void RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const ResourceUsageTag &tag);
    bool ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const char *func_name) const;
    void RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const ResourceUsageTag &tag);
    bool ValidateDrawSubpassAttachment(const char *func_name) const;
    void RecordDrawSubpassAttachment(const ResourceUsageTag &tag);
    bool ValidateNextSubpass(const char *func_name) const;
    bool ValidateEndRenderpass(const char *func_name) const;
    void RecordNextSubpass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);
    void RecordEndRenderPass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);
    CMD_BUFFER_STATE *GetCommandBufferState() { return cb_state_.get(); }
    const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
    VkQueueFlags GetQueueFlags() const { return queue_flags_; }
    inline ResourceUsageTag NextCommandTag(CMD_TYPE command) {
        // TODO: add command encoding to ResourceUsageTag.
        // What else do we want to include? Do we want some sort of "parent" or global sequence number?
        command_number_++;
        const auto index = (static_cast<uint64_t>(reset_count_) << 32) | command_number_;
        ResourceUsageTag next(index, command);
        return next;
    }

  private:
    uint32_t command_number_;
    uint32_t reset_count_;
    std::vector<RenderPassAccessContext> render_pass_contexts_;
    AccessContext cb_access_context_;
    AccessContext *current_context_;
    RenderPassAccessContext *current_renderpass_context_;
    std::shared_ptr<CMD_BUFFER_STATE> cb_state_;
    SyncValidator *sync_state_;

    VkQueueFlags queue_flags_;
};
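
// Illustrative note: NextCommandTag() above packs the command buffer's reset generation into the high
// 32 bits of the tag index and a per-recording command counter into the low 32 bits. For example, the
// fifth command recorded after the second reset gets index (2ull << 32) | 5 == 0x0000000200000005, so
// tags from later recordings always compare as "after" earlier ones via ResourceUsageTag::IsBefore.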

class SyncValidator : public ValidationStateTracker, public SyncStageAccess {
  public:
    SyncValidator() { container_type = LayerObjectTypeSyncValidation; }
    using StateTracker = ValidationStateTracker;

    using StateTracker::AccessorTraitsTypes;
    std::unordered_map<VkCommandBuffer, std::unique_ptr<CommandBufferAccessContext>> cb_access_state;
    CommandBufferAccessContext *GetAccessContextImpl(VkCommandBuffer command_buffer, bool do_insert) {
        auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            if (!do_insert) return nullptr;
            // If we don't have one, make it.
            auto cb_state = GetShared<CMD_BUFFER_STATE>(command_buffer);
            assert(cb_state.get());
            auto queue_flags = GetQueueFlags(*cb_state);
            std::unique_ptr<CommandBufferAccessContext> context(new CommandBufferAccessContext(*this, cb_state, queue_flags));
            auto insert_pair = cb_access_state.insert(std::make_pair(command_buffer, std::move(context)));
            found_it = insert_pair.first;
        }
        return found_it->second.get();
    }
    CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, true);  // true -> do_insert on not found
    }
    CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer) {
        return GetAccessContextImpl(command_buffer, false);  // false -> don't do_insert on not found
    }

    const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const {
        const auto found_it = cb_access_state.find(command_buffer);
        if (found_it == cb_access_state.end()) {
            return nullptr;
        }
        return found_it->second.get();
    }

    void ApplyGlobalBarriers(AccessContext *context, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                             SyncStageAccessFlags src_stage_scope, SyncStageAccessFlags dst_stage_scope,
                             uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers);
    void ApplyBufferBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_stage_scope,
                             VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count,
                             const VkBufferMemoryBarrier *barriers);
    void ApplyImageBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_stage_scope,
                            VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count,
                            const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag);

    void ResetCommandBufferCallback(VkCommandBuffer command_buffer);
    void FreeCommandBufferCallback(VkCommandBuffer command_buffer);
    void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                  const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE command);
    void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                              const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command);
    void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command);
    bool SupressedBoundDescriptorWAW(const HazardResult &hazard) const;

    void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                    const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result);

    bool ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const char *func_name) const;

    bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                           VkSubpassContents contents) const;

    bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                               const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const;

    bool PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                            const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const;

    bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                      const VkBufferCopy *pRegions) const;

    void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
                                    const VkBufferCopy *pRegions);

    bool PreCallValidateCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) const;

    void PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos);

    bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageCopy *pRegions) const;

    void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions);

    bool PreCallValidateCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) const;

    void PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo);

    bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                           VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                           uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                           uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                           uint32_t imageMemoryBarrierCount,
                                           const VkImageMemoryBarrier *pImageMemoryBarriers) const;

    void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                         VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                         uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                         uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                         uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers);

    void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
                                          VkResult result);

    void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                          VkSubpassContents contents);
    void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                           const VkSubpassBeginInfo *pSubpassBeginInfo);
    void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                              const VkSubpassBeginInfo *pSubpassBeginInfo);

    bool ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                const VkSubpassEndInfoKHR *pSubpassEndInfo, const char *func_name) const;
    bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const;
    bool PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                        const VkSubpassEndInfoKHR *pSubpassEndInfo) const;
    bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) const;

    void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
    void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                       const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                          const VkSubpassEndInfo *pSubpassEndInfo);

    bool ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo,
                                  const char *func_name) const;
    bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const;
    bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) const;
    bool PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) const;

    void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer);
    void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);
    void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);

    template <typename BufferImageCopyRegionType>
    bool ValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                      VkImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                      CopyCommandVersion version) const;
    bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                             VkImageLayout dstImageLayout, uint32_t regionCount,
                                             const VkBufferImageCopy *pRegions) const;
    bool PreCallValidateCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
                                                 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) const;

    template <typename BufferImageCopyRegionType>
    void RecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                    VkImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                    CopyCommandVersion version);
    void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                           VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions);
    void PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
                                               const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo);

    template <typename BufferImageCopyRegionType>
    bool ValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                      VkBuffer dstBuffer, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                      CopyCommandVersion version) const;
    bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                             VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) const;
    bool PreCallValidateCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
                                                 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) const;

    template <typename BufferImageCopyRegionType>
    void RecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                    VkBuffer dstBuffer, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
                                    CopyCommandVersion version);
    void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                           VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions);
    void PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
                                               const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo);

    template <typename RegionType>
    bool ValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                              VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter,
                              const char *apiName) const;

    bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                     const VkImageBlit *pRegions, VkFilter filter) const;
    bool PreCallValidateCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) const;

    template <typename RegionType>
    void RecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                            VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter,
                            ResourceUsageTag tag);
    void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions,
                                   VkFilter filter);
    void PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo);

    bool ValidateIndirectBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, const VkDeviceSize struct_size,
                                const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, const uint32_t stride,
                                const char *function) const;
    void RecordIndirectBuffer(AccessContext &context, const ResourceUsageTag &tag, const VkDeviceSize struct_size,
                              const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, uint32_t stride);

    bool ValidateCountBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                             const char *function) const;
    void RecordCountBuffer(AccessContext &context, const ResourceUsageTag &tag, VkBuffer buffer, VkDeviceSize offset);

    bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const;
    void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z);

    bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const;
    void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);

    bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
                                uint32_t firstInstance) const;
    void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
                              uint32_t firstInstance);

    bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
                                       uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const;
    void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
                                     uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);

    bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
                                        uint32_t stride) const;
    void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
                                      uint32_t stride);

    bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                               uint32_t drawCount, uint32_t stride) const;
    void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             uint32_t drawCount, uint32_t stride);

    bool ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer,
                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride,
                                      const char *function) const;
    bool PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                             uint32_t stride) const;
    void PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                           VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                           uint32_t stride);
    bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                uint32_t stride) const;
    void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                              VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                              uint32_t stride);
    bool PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                uint32_t stride) const;
    void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                              VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                              uint32_t stride);

    bool ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                             VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                             uint32_t stride, const char *function) const;
    bool PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                    VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                    uint32_t stride) const;
    void PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                  VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                  uint32_t stride);
    bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                       VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                       uint32_t stride) const;
    void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                     VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                     uint32_t stride);
    bool PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                       VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                       uint32_t stride) const;
    void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                     VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                     uint32_t stride);

    bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                           const VkClearColorValue *pColor, uint32_t rangeCount,
                                           const VkImageSubresourceRange *pRanges) const;
    void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                         const VkClearColorValue *pColor, uint32_t rangeCount,
                                         const VkImageSubresourceRange *pRanges);

    bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                                  const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
                                                  const VkImageSubresourceRange *pRanges) const;
    void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                                const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
                                                const VkImageSubresourceRange *pRanges);

    bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
                                                uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                VkDeviceSize stride, VkQueryResultFlags flags) const;
    void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
                                              uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
                                              VkQueryResultFlags flags);

    bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
                                      uint32_t data) const;
    void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
                                    uint32_t data);

    bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                        VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                        const VkImageResolve *pRegions) const;

    void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                      VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                      const VkImageResolve *pRegions);

    bool PreCallValidateCmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR *pResolveImageInfo) const;
    void PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR *pResolveImageInfo);

    bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                        VkDeviceSize dataSize, const void *pData) const;
    void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                      VkDeviceSize dataSize, const void *pData);

    bool PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const;
    void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                              VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker);
};