blob: 5fd47b1207b9f1e49c4ac4e0ed5e2b537f986cd8 [file] [log] [blame]
/*
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 */
19
20#pragma once
21
John Zulauf7635de32020-05-29 17:14:15 -060022#include <limits>
John Zulauf9cb530d2019-09-30 14:14:10 -060023#include <map>
24#include <memory>
25#include <unordered_map>
26#include <vulkan/vulkan.h>
27
28#include "synchronization_validation_types.h"
29#include "state_tracker.h"
30
John Zulauf355e49b2020-04-24 15:11:15 -060031class SyncValidator;
32
// Classification of a detected synchronization hazard.
// The *_AFTER_* values describe ordering hazards between accesses in the same
// dependency-ordered stream; the *_RACING_* values are presumably reported for
// accesses with no dependency relationship at all (see DetectAsyncHazard usage
// below) -- confirm against the detection implementation.
enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};
John Zulauf9cb530d2019-09-30 14:14:10 -060042
43// Useful Utilites for manipulating StageAccess parameters, suitable as base class to save typing
44struct SyncStageAccess {
John Zulauf1507ee42020-05-18 11:33:09 -060045 static inline SyncStageAccessFlagBits FlagBit(SyncStageAccessIndex stage_access) {
John Zulauf9cb530d2019-09-30 14:14:10 -060046 return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
47 }
John Zulauf1507ee42020-05-18 11:33:09 -060048 static inline SyncStageAccessFlags Flags(SyncStageAccessIndex stage_access) {
49 return static_cast<SyncStageAccessFlags>(FlagBit(stage_access));
50 }
John Zulauf9cb530d2019-09-30 14:14:10 -060051
52 static bool IsRead(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessReadMask); }
53 static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }
54
55 static bool IsWrite(SyncStageAccessFlagBits stage_access_bit) { return 0 != (stage_access_bit & syncStageAccessWriteMask); }
John Zulauf1507ee42020-05-18 11:33:09 -060056 static bool HasWrite(SyncStageAccessFlags stage_access_mask) { return 0 != (stage_access_mask & syncStageAccessWriteMask); }
John Zulauf9cb530d2019-09-30 14:14:10 -060057 static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
58 static VkPipelineStageFlagBits PipelineStageBit(SyncStageAccessIndex stage_access_index) {
59 return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
60 }
61 static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags stages);
62 static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags access);
63 static SyncStageAccessFlags AccessScope(VkPipelineStageFlags stages, VkAccessFlags access);
64 static SyncStageAccessFlags AccessScope(SyncStageAccessFlags stage_scope, VkAccessFlags accesses) {
65 return stage_scope & AccessScopeByAccess(accesses);
66 }
67};
68
John Zulauf5f13a792020-03-10 07:31:21 -060069struct ResourceUsageTag {
70 uint64_t index;
John Zulaufcc6fecb2020-06-17 15:24:54 -060071 CMD_TYPE command;
John Zulauf7635de32020-05-29 17:14:15 -060072 const static uint64_t kMaxIndex = std::numeric_limits<uint64_t>::max();
John Zulauf5f13a792020-03-10 07:31:21 -060073 ResourceUsageTag &operator++() {
74 index++;
75 return *this;
76 }
77 bool IsBefore(const ResourceUsageTag &rhs) const { return index < rhs.index; }
John Zulaufe5da6e52020-03-18 15:32:18 -060078 bool operator==(const ResourceUsageTag &rhs) const { return (index == rhs.index); }
79 bool operator!=(const ResourceUsageTag &rhs) const { return !(*this == rhs); }
John Zulaufcc6fecb2020-06-17 15:24:54 -060080 ResourceUsageTag() : index(0), command(CMD_NONE) {}
81 ResourceUsageTag(uint64_t index_, CMD_TYPE command_) : index(index_), command(command_) {}
John Zulauf5f13a792020-03-10 07:31:21 -060082};
83
John Zulauf9cb530d2019-09-30 14:14:10 -060084struct HazardResult {
85 SyncHazard hazard = NONE;
John Zulauf37ceaed2020-07-03 16:18:15 -060086 SyncStageAccessFlags prior_access = 0U; // TODO -- change to a NONE enum in ...Bits
John Zulauf9cb530d2019-09-30 14:14:10 -060087 ResourceUsageTag tag = ResourceUsageTag();
John Zulauf37ceaed2020-07-03 16:18:15 -060088 void Set(SyncHazard hazard_, SyncStageAccessFlags prior_, const ResourceUsageTag &tag_) {
John Zulauf9cb530d2019-09-30 14:14:10 -060089 hazard = hazard_;
John Zulauf37ceaed2020-07-03 16:18:15 -060090 prior_access = prior_;
John Zulauf9cb530d2019-09-30 14:14:10 -060091 tag = tag_;
92 }
93};
94
John Zulauf3d84f1b2020-03-09 13:33:25 -060095struct SyncBarrier {
96 VkPipelineStageFlags src_exec_scope;
97 SyncStageAccessFlags src_access_scope;
98 VkPipelineStageFlags dst_exec_scope;
99 SyncStageAccessFlags dst_access_scope;
100 SyncBarrier() = default;
101 SyncBarrier &operator=(const SyncBarrier &) = default;
102 SyncBarrier(VkQueueFlags gueue_flags, const VkSubpassDependency2 &sub_pass_barrier);
103};
John Zulauf69133422020-05-20 14:55:53 -0600104
// To represent ordering guarantees such as rasterization and store -- an
// implicit execution/access scope pair applied during hazard detection without
// an explicit user barrier.
struct SyncOrderingBarrier {
    VkPipelineStageFlags exec_scope;    // stages covered by the implicit ordering guarantee
    SyncStageAccessFlags access_scope;  // stage/access bits covered by the guarantee
    SyncOrderingBarrier() = default;
    SyncOrderingBarrier &operator=(const SyncOrderingBarrier &) = default;
};
John Zulauf3d84f1b2020-03-09 13:33:25 -0600112
John Zulauf9cb530d2019-09-30 14:14:10 -0600113class ResourceAccessState : public SyncStageAccess {
114 protected:
115 // Mutliple read operations can be simlutaneously (and independently) synchronized,
116 // given the only the second execution scope creates a dependency chain, we have to track each,
117 // but only up to one per pipeline stage (as another read from the *same* stage become more recent,
118 // and applicable one for hazard detection
119 struct ReadState {
120 VkPipelineStageFlagBits stage; // The stage of this read
John Zulauf37ceaed2020-07-03 16:18:15 -0600121 SyncStageAccessFlags access; // TODO: Change to FlagBits when we have a None bit enum
John Zulauf9cb530d2019-09-30 14:14:10 -0600122 VkPipelineStageFlags barriers; // all applicable barriered stages
123 ResourceUsageTag tag;
John Zulaufe5da6e52020-03-18 15:32:18 -0600124 bool operator==(const ReadState &rhs) const {
John Zulauf37ceaed2020-07-03 16:18:15 -0600125 bool same = (stage == rhs.stage) && (access == rhs.access) && (barriers == rhs.barriers) && (tag == rhs.tag);
John Zulaufe5da6e52020-03-18 15:32:18 -0600126 return same;
127 }
128 bool operator!=(const ReadState &rhs) const { return !(*this == rhs); }
John Zulauf9cb530d2019-09-30 14:14:10 -0600129 };
130
131 public:
132 HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;
John Zulauf69133422020-05-20 14:55:53 -0600133 HazardResult DetectHazard(SyncStageAccessIndex usage_index, const SyncOrderingBarrier &ordering) const;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600134
John Zulaufc9201222020-05-13 15:13:03 -0600135 HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags source_exec_scope,
136 SyncStageAccessFlags source_access_scope) const;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600137 HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index) const;
138
John Zulauf9cb530d2019-09-30 14:14:10 -0600139 void Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag);
John Zulauf5f13a792020-03-10 07:31:21 -0600140 void Resolve(const ResourceAccessState &other);
141 void ApplyBarrier(const SyncBarrier &barrier);
John Zulauf9cb530d2019-09-30 14:14:10 -0600142 void ApplyExecutionBarrier(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask);
143 void ApplyMemoryAccessBarrier(VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_scope,
144 VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_scope);
145
146 ResourceAccessState()
John Zulauf355e49b2020-04-24 15:11:15 -0600147 : write_barriers(~SyncStageAccessFlags(0)),
148 write_dependency_chain(0),
John Zulauf355e49b2020-04-24 15:11:15 -0600149 write_tag(),
John Zulaufd14743a2020-07-03 09:42:39 -0600150 last_write(0),
151 input_attachment_barriers(kNoAttachmentRead),
152 input_attachment_tag(),
153 last_read_count(0),
154 last_read_stages(0) {}
John Zulauf9cb530d2019-09-30 14:14:10 -0600155
John Zulauf3d84f1b2020-03-09 13:33:25 -0600156 bool HasWriteOp() const { return last_write != 0; }
John Zulaufe5da6e52020-03-18 15:32:18 -0600157 bool operator==(const ResourceAccessState &rhs) const {
158 bool same = (write_barriers == rhs.write_barriers) && (write_dependency_chain == rhs.write_dependency_chain) &&
159 (last_read_count == rhs.last_read_count) && (last_read_stages == rhs.last_read_stages) &&
John Zulaufd14743a2020-07-03 09:42:39 -0600160 (write_tag == rhs.write_tag) && (input_attachment_barriers == rhs.input_attachment_barriers) &&
161 ((input_attachment_barriers == kNoAttachmentRead) || input_attachment_tag == rhs.input_attachment_tag);
John Zulaufe5da6e52020-03-18 15:32:18 -0600162 for (uint32_t i = 0; same && i < last_read_count; i++) {
163 same |= last_reads[i] == rhs.last_reads[i];
164 }
165 return same;
166 }
167 bool operator!=(const ResourceAccessState &rhs) const { return !(*this == rhs); }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600168
John Zulauf9cb530d2019-09-30 14:14:10 -0600169 private:
John Zulaufd14743a2020-07-03 09:42:39 -0600170 static constexpr VkPipelineStageFlags kNoAttachmentRead = ~VkPipelineStageFlags(0);
John Zulauf9cb530d2019-09-30 14:14:10 -0600171 bool IsWriteHazard(SyncStageAccessFlagBits usage) const { return 0 != (usage & ~write_barriers); }
John Zulaufd14743a2020-07-03 09:42:39 -0600172
173 static bool IsReadHazard(VkPipelineStageFlagBits stage, const VkPipelineStageFlags barriers) {
174 return 0 != (stage & ~barriers);
175 }
176 static bool IsReadHazard(VkPipelineStageFlags stage_mask, const VkPipelineStageFlags barriers) {
177 return stage_mask != (stage_mask & barriers);
178 }
179
John Zulauf9cb530d2019-09-30 14:14:10 -0600180 bool IsReadHazard(VkPipelineStageFlagBits stage, const ReadState &read_access) const {
John Zulaufd14743a2020-07-03 09:42:39 -0600181 return IsReadHazard(stage, read_access.barriers);
John Zulauf9cb530d2019-09-30 14:14:10 -0600182 }
John Zulauf0cb5be22020-01-23 12:18:22 -0700183 bool IsReadHazard(VkPipelineStageFlags stage_mask, const ReadState &read_access) const {
John Zulaufd14743a2020-07-03 09:42:39 -0600184 return IsReadHazard(stage_mask, read_access.barriers);
John Zulauf0cb5be22020-01-23 12:18:22 -0700185 }
John Zulaufd14743a2020-07-03 09:42:39 -0600186
187 // TODO: Add a NONE (zero) enum to SyncStageAccessFlagBits for input_attachment_read and last_write
188
John Zulauf9cb530d2019-09-30 14:14:10 -0600189 // With reads, each must be "safe" relative to it's prior write, so we need only
190 // save the most recent write operation (as anything *transitively* unsafe would arleady
191 // be included
192 SyncStageAccessFlags write_barriers; // union of applicable barrier masks since last write
193 VkPipelineStageFlags write_dependency_chain; // intiially zero, but accumulating the dstStages of barriers if they chain.
John Zulauf9cb530d2019-09-30 14:14:10 -0600194 ResourceUsageTag write_tag;
John Zulauf355e49b2020-04-24 15:11:15 -0600195 SyncStageAccessFlags last_write; // only the most recent write
John Zulauf9cb530d2019-09-30 14:14:10 -0600196
John Zulaufd14743a2020-07-03 09:42:39 -0600197 // This is special as it's a framebuffer-local read from a framebuffer-global pipeline stage
198 // As the only possible state for the input attachment stage/access is SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ_BIT,
199 // encode the presence with the barriers mask, ~0 denotes no pending input attachment. Zero -- is the no-barrier state,
200 // otherwise reflects the barrier/dependency chain information.
201 VkPipelineStageFlags input_attachment_barriers;
202 ResourceUsageTag input_attachment_tag;
203
204 uint32_t last_read_count;
205 VkPipelineStageFlags last_read_stages;
206 static constexpr size_t kStageCount = 32; // TODO: The manual count was 28 real stages. Add stage count to codegen
207 std::array<ReadState, kStageCount> last_reads;
John Zulauf9cb530d2019-09-30 14:14:10 -0600208};
209
// Map from an address range (VkDeviceSize offsets) to the access state covering it.
using ResourceAccessRangeMap = sparse_container::range_map<VkDeviceSize, ResourceAccessState>;
// Key (range) type of the map above.
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;
// Parallel walk over a mutable map and a const map, used when merging/resolving state.
using ResourceRangeMergeIterator = sparse_container::parallel_iterator<ResourceAccessRangeMap, const ResourceAccessRangeMap>;
John Zulauf9cb530d2019-09-30 14:14:10 -0600213
// Access-state container for one execution scope (command buffer or subpass):
// range maps of ResourceAccessState per address space, plus links to previous
// (dependency-ordered) and async (unordered) contexts for recursive detection.
class AccessContext {
  public:
    // Address spaces tracked: linear (bound memory offsets) and idealized
    // (opaque/tiled images addressed by their own index space).
    enum AddressType : int { kLinearAddress = 0, kIdealizedAddress = 1, kMaxAddressType = 1 };
    // Which categories of contexts a detection query should recurse into.
    enum DetectOptions : uint32_t {
        kDetectPrevious = 1U << 0,
        kDetectAsync = 1U << 1,
        kDetectAll = (kDetectPrevious | kDetectAsync)
    };

    // A link to a prior context together with the barrier guarding the transition from it.
    struct TrackBack {
        SyncBarrier barrier;
        const AccessContext *context;
        TrackBack(const AccessContext *context_, VkQueueFlags queue_flags_, const VkSubpassDependency2 &subpass_barrier_)
            : barrier(queue_flags_, subpass_barrier_), context(context_) {}
        TrackBack &operator=(const TrackBack &) = default;
        TrackBack() = default;
    };

    // Hazard detection entry points for buffers, images (by layers or full
    // subresource range), image views, and barrier/layout-transition checks.
    HazardResult DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;
    template <typename Detector>
    HazardResult DetectHazard(Detector &detector, const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
                              const VkOffset3D &offset, const VkExtent3D &extent, DetectOptions options) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
                              const VkExtent3D &extent) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, const SyncOrderingBarrier &ordering,
                              const VkOffset3D &offset, const VkExtent3D &extent) const;
    HazardResult DetectHazard(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
                              const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask = 0U) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          SyncStageAccessFlags src_access_scope, const VkImageSubresourceRange &subresource_range,
                                          DetectOptions options) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                          SyncStageAccessFlags src_stage_accesses, const VkImageMemoryBarrier &barrier) const;
    HazardResult DetectSubpassTransitionHazard(const TrackBack &track_back, const IMAGE_VIEW_STATE *attach_view) const;

    const TrackBack &GetDstExternalTrackBack() const { return dst_external_; }
    // Clear all maps and links; note dst_external_ is intentionally left as-is here.
    void Reset() {
        prev_.clear();
        prev_by_subpass_.clear();
        async_.clear();
        src_external_ = TrackBack();
        for (auto &map : access_state_maps_) {
            map.clear();
        }
    }
    // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead.
    // Would need to add a "hint" overload to parallel_iterator::invalidate_[AB] call, if so.
    void ResolvePreviousAccess(AddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state) const;
    void ResolvePreviousAccess(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range,
                               AddressType address_type, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state) const;
    void ResolveAccessRange(AddressType type, const ResourceAccessRange &range, const SyncBarrier *barrier,
                            ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
                            bool recur_to_infill = true) const;
    // Record accesses into the state maps (buffer/image/view overloads).
    void UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const VkOffset3D &offset,
                           const VkExtent3D &extent, VkImageAspectFlags aspect_mask, const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           const ResourceUsageTag &tag);
    // Record render-pass resolve/store attachment accesses for a subpass.
    void UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                       const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                       const ResourceUsageTag &tag);
    void UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                     const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
                                     const ResourceUsageTag &tag);

    // Fold the state of child (subpass) contexts back into this context.
    void ResolveChildContexts(const std::vector<AccessContext> &contexts);

    void ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                           VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags dst_accesse_scope,
                           const VkImageSubresourceRange &subresource_range);

    void ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                           VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags dst_access_scope,
                           const VkImageSubresourceRange &subresource_range, bool layout_transition, const ResourceUsageTag &tag);
    void ApplyImageBarrier(const IMAGE_STATE &image, const SyncBarrier &barrier, const VkImageSubresourceRange &subresource_range,
                           bool layout_transition, const ResourceUsageTag &tag);

    // Generic state-map updates driven by a caller-supplied action functor.
    template <typename Action>
    void UpdateMemoryAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action);
    template <typename Action>
    void UpdateMemoryAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range, const Action action);

    template <typename Action>
    void ApplyGlobalBarriers(const Action &barrier_action);

    // Which address space an image's accesses are tracked in.
    static AddressType ImageAddressType(const IMAGE_STATE &image);
    static VkDeviceSize ResourceBaseAddress(const BINDABLE &bindable);

    // Construct a subpass context wired to its dependency-ordered predecessors.
    AccessContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
                  const std::vector<AccessContext> &contexts, const AccessContext *external_context);

    AccessContext() { Reset(); }
    AccessContext(const AccessContext &copy_from) = default;

    ResourceAccessRangeMap &GetAccessStateMap(AddressType type) { return access_state_maps_[type]; }
    const ResourceAccessRangeMap &GetAccessStateMap(AddressType type) const { return access_state_maps_[type]; }
    ResourceAccessRangeMap &GetLinearMap() { return GetAccessStateMap(AddressType::kLinearAddress); }
    const ResourceAccessRangeMap &GetLinearMap() const { return GetAccessStateMap(AddressType::kLinearAddress); }
    ResourceAccessRangeMap &GetIdealizedMap() { return GetAccessStateMap(AddressType::kIdealizedAddress); }
    const ResourceAccessRangeMap &GetIdealizedMap() const { return GetAccessStateMap(AddressType::kIdealizedAddress); }
    // VK_SUBPASS_EXTERNAL maps to the external-source trackback; otherwise look up by subpass.
    const TrackBack *GetTrackBackFromSubpass(uint32_t subpass) const {
        if (subpass == VK_SUBPASS_EXTERNAL) {
            return &src_external_;
        } else {
            assert(subpass < prev_by_subpass_.size());
            return prev_by_subpass_[subpass];
        }
    }

    // Render-pass validation helpers (defined out of line).
    bool ValidateLayoutTransitions(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                   const VkRect2D &render_area, uint32_t subpass,
                                   const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name) const;
    bool ValidateLoadOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                               uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                               const char *func_name) const;
    bool ValidateStoreOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                uint32_t subpass, const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                const char *func_name) const;
    bool ValidateResolveOperations(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                   const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
                                   uint32_t subpass) const;

  private:
    HazardResult DetectHazard(AddressType type, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
    HazardResult DetectBarrierHazard(AddressType type, SyncStageAccessIndex current_usage, VkPipelineStageFlags src_exec_scope,
                                     SyncStageAccessFlags src_access_scope, const ResourceAccessRange &range,
                                     DetectOptions options) const;

    template <typename Detector>
    HazardResult DetectHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range,
                              DetectOptions options) const;
    template <typename Detector>
    HazardResult DetectAsyncHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectPreviousHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    void UpdateAccessState(AddressType type, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                           const ResourceUsageTag &tag);
    constexpr static int kAddressTypeCount = AddressType::kMaxAddressType + 1;
    static const std::array<AddressType, kAddressTypeCount> kAddressTypes;
    std::array<ResourceAccessRangeMap, kAddressTypeCount> access_state_maps_;  // one map per address space
    std::vector<TrackBack> prev_;             // dependency-ordered predecessor contexts
    std::vector<TrackBack *> prev_by_subpass_;  // prev_ entries indexed by subpass
    std::vector<AccessContext *> async_;      // contexts with no ordering relative to this one
    TrackBack src_external_;                  // VK_SUBPASS_EXTERNAL as a source
    TrackBack dst_external_;                  // VK_SUBPASS_EXTERNAL as a destination
};
377
// Tracks synchronization state across one render pass instance: one
// AccessContext per subpass, the attachment views, and the current subpass index.
class RenderPassAccessContext {
  public:
    RenderPassAccessContext() : rp_state_(nullptr), current_subpass_(0) {}

    // Validation entry points for draw-time attachment use and subpass transitions.
    bool ValidateDrawSubpassAttachment(const SyncValidator &sync_state, const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area,
                                       const char *func_name) const;
    void RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area, const ResourceUsageTag &tag);
    bool ValidateNextSubpass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *command_name) const;
    bool ValidateEndRenderPass(const SyncValidator &sync_state, const VkRect2D &render_area, const char *func_name) const;
    bool ValidateFinalSubpassLayoutTransitions(const SyncValidator &sync_state, const VkRect2D &render_area,
                                               const char *func_name) const;

    // Record state for the implicit render-pass operations (transitions, load ops)
    // and the begin/next/end lifecycle.
    void RecordLayoutTransitions(const ResourceUsageTag &tag);
    void RecordLoadOperations(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordBeginRenderPass(const SyncValidator &state, const CMD_BUFFER_STATE &cb_state, const AccessContext *external_context,
                               VkQueueFlags queue_flags, const ResourceUsageTag &tag);
    void RecordNextSubpass(const VkRect2D &render_area, const ResourceUsageTag &tag);
    void RecordEndRenderPass(AccessContext *external_context, const VkRect2D &render_area, const ResourceUsageTag &tag);

    AccessContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
    const AccessContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
    const std::vector<AccessContext> &GetContexts() const { return subpass_contexts_; }
    uint32_t GetCurrentSubpass() const { return current_subpass_; }
    const RENDER_PASS_STATE *GetRenderPassState() const { return rp_state_; }
    // Builds a proxy context for store/resolve validation; caller owns the result.
    AccessContext *CreateStoreResolveProxy(const VkRect2D &render_area) const;

  private:
    const RENDER_PASS_STATE *rp_state_;  // non-owning; render pass being tracked
    uint32_t current_subpass_;
    std::vector<AccessContext> subpass_contexts_;             // one per subpass
    std::vector<const IMAGE_VIEW_STATE *> attachment_views_;  // non-owning attachment views
};
410
411class CommandBufferAccessContext {
412 public:
413 CommandBufferAccessContext()
John Zulauf355e49b2020-04-24 15:11:15 -0600414 : command_number_(0),
415 reset_count_(0),
416 render_pass_contexts_(),
417 cb_access_context_(),
418 current_context_(&cb_access_context_),
John Zulauf3d84f1b2020-03-09 13:33:25 -0600419 current_renderpass_context_(),
420 cb_state_(),
421 queue_flags_() {}
John Zulauf355e49b2020-04-24 15:11:15 -0600422 CommandBufferAccessContext(SyncValidator &sync_validator, std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
John Zulauf3d84f1b2020-03-09 13:33:25 -0600423 : CommandBufferAccessContext() {
424 cb_state_ = cb_state;
John Zulauf355e49b2020-04-24 15:11:15 -0600425 sync_state_ = &sync_validator;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600426 queue_flags_ = queue_flags;
427 }
John Zulauf5c5e88d2019-12-26 11:22:02 -0700428
429 void Reset() {
John Zulauf355e49b2020-04-24 15:11:15 -0600430 command_number_ = 0;
431 reset_count_++;
432 cb_access_context_.Reset();
John Zulauf3d84f1b2020-03-09 13:33:25 -0600433 render_pass_contexts_.clear();
John Zulauf355e49b2020-04-24 15:11:15 -0600434 current_context_ = &cb_access_context_;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600435 current_renderpass_context_ = nullptr;
John Zulauf5c5e88d2019-12-26 11:22:02 -0700436 }
437
John Zulauf540266b2020-04-06 18:54:53 -0600438 AccessContext *GetCurrentAccessContext() { return current_context_; }
439 const AccessContext *GetCurrentAccessContext() const { return current_context_; }
John Zulauf355e49b2020-04-24 15:11:15 -0600440 void RecordBeginRenderPass(const ResourceUsageTag &tag);
441 bool ValidateBeginRenderPass(const RENDER_PASS_STATE &render_pass, const VkRenderPassBeginInfo *pRenderPassBegin,
442 const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const char *func_name) const;
locke-lunarg61870c22020-06-09 14:51:50 -0600443 bool ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const char *func_name) const;
444 void RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, const ResourceUsageTag &tag);
445 bool ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const char *func_name) const;
446 void RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const ResourceUsageTag &tag);
447 bool ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const char *func_name) const;
448 void RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const ResourceUsageTag &tag);
449 bool ValidateDrawSubpassAttachment(const char *func_name) const;
450 void RecordDrawSubpassAttachment(const ResourceUsageTag &tag);
John Zulauf355e49b2020-04-24 15:11:15 -0600451 bool ValidateNextSubpass(const char *func_name) const;
452 bool ValidateEndRenderpass(const char *func_name) const;
453 void RecordNextSubpass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);
454 void RecordEndRenderPass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag);
John Zulauf3d84f1b2020-03-09 13:33:25 -0600455 CMD_BUFFER_STATE *GetCommandBufferState() { return cb_state_.get(); }
456 const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
457 VkQueueFlags GetQueueFlags() const { return queue_flags_; }
John Zulauf355e49b2020-04-24 15:11:15 -0600458 inline ResourceUsageTag NextCommandTag(CMD_TYPE command) {
459 // TODO: add command encoding to ResourceUsageTag.
460 // What else we what to include. Do we want some sort of "parent" or global sequence number
461 command_number_++;
John Zulaufcc6fecb2020-06-17 15:24:54 -0600462 const auto index = (static_cast<uint64_t>(reset_count_) << 32) | command_number_;
463 ResourceUsageTag next(index, command);
John Zulauf355e49b2020-04-24 15:11:15 -0600464 return next;
465 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600466
  private:
    uint32_t command_number_;  // Ordinal of the most recent command; low 32 bits of the usage tag (see NextCommandTag)
    uint32_t reset_count_;     // Incremented on each reset; high 32 bits of the usage tag
    // Access contexts for render passes recorded into this command buffer; cleared on reset.
    std::vector<RenderPassAccessContext> render_pass_contexts_;
    AccessContext cb_access_context_;  // Command-buffer-level access context; current_context_ points here after reset
    AccessContext *current_context_;   // Context currently receiving recorded accesses
    RenderPassAccessContext *current_renderpass_context_;  // Nulled on reset; presumably non-null only inside a render pass
    std::shared_ptr<CMD_BUFFER_STATE> cb_state_;  // Shared state-tracker record of this command buffer
    SyncValidator *sync_state_;                   // Back-pointer to the owning validator

    VkQueueFlags queue_flags_;  // Queue flags for this context -- see GetQueueFlags()
John Zulauf9cb530d2019-09-30 14:14:10 -0600478};
479
480class SyncValidator : public ValidationStateTracker, public SyncStageAccess {
481 public:
482 SyncValidator() { container_type = LayerObjectTypeSyncValidation; }
483 using StateTracker = ValidationStateTracker;
484
485 using StateTracker::AccessorTraitsTypes;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600486 std::unordered_map<VkCommandBuffer, std::unique_ptr<CommandBufferAccessContext>> cb_access_state;
487 CommandBufferAccessContext *GetAccessContextImpl(VkCommandBuffer command_buffer, bool do_insert) {
John Zulauf9cb530d2019-09-30 14:14:10 -0600488 auto found_it = cb_access_state.find(command_buffer);
489 if (found_it == cb_access_state.end()) {
490 if (!do_insert) return nullptr;
491 // If we don't have one, make it.
John Zulauf3d84f1b2020-03-09 13:33:25 -0600492 auto cb_state = GetShared<CMD_BUFFER_STATE>(command_buffer);
493 assert(cb_state.get());
494 auto queue_flags = GetQueueFlags(*cb_state);
John Zulauf355e49b2020-04-24 15:11:15 -0600495 std::unique_ptr<CommandBufferAccessContext> context(new CommandBufferAccessContext(*this, cb_state, queue_flags));
John Zulauf3d84f1b2020-03-09 13:33:25 -0600496 auto insert_pair = cb_access_state.insert(std::make_pair(command_buffer, std::move(context)));
John Zulauf9cb530d2019-09-30 14:14:10 -0600497 found_it = insert_pair.first;
498 }
499 return found_it->second.get();
500 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600501 CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) {
502 return GetAccessContextImpl(command_buffer, true); // true -> do_insert on not found
John Zulauf9cb530d2019-09-30 14:14:10 -0600503 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600504 CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer) {
505 return GetAccessContextImpl(command_buffer, false); // false -> don't do_insert on not found
John Zulauf9cb530d2019-09-30 14:14:10 -0600506 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600507
508 const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const {
John Zulauf9cb530d2019-09-30 14:14:10 -0600509 const auto found_it = cb_access_state.find(command_buffer);
510 if (found_it == cb_access_state.end()) {
511 return nullptr;
512 }
513 return found_it->second.get();
514 }
515
John Zulauf540266b2020-04-06 18:54:53 -0600516 void ApplyGlobalBarriers(AccessContext *context, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
John Zulauf9cb530d2019-09-30 14:14:10 -0600517 SyncStageAccessFlags src_stage_scope, SyncStageAccessFlags dst_stage_scope,
518 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers);
John Zulauf540266b2020-04-06 18:54:53 -0600519 void ApplyBufferBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_stage_scope,
520 VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count,
521 const VkBufferMemoryBarrier *barriers);
522 void ApplyImageBarriers(AccessContext *context, VkPipelineStageFlags src_stage_mask, SyncStageAccessFlags src_stage_scope,
523 VkPipelineStageFlags dst_stage_mask, SyncStageAccessFlags dst_stage_scope, uint32_t barrier_count,
John Zulauf355e49b2020-04-24 15:11:15 -0600524 const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag);
John Zulauf9cb530d2019-09-30 14:14:10 -0600525
John Zulaufd1f85d42020-04-15 12:23:15 -0600526 void ResetCommandBufferCallback(VkCommandBuffer command_buffer);
527 void FreeCommandBufferCallback(VkCommandBuffer command_buffer);
John Zulauf3d84f1b2020-03-09 13:33:25 -0600528 void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
John Zulauf355e49b2020-04-24 15:11:15 -0600529 const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE command);
530 void RecordCmdNextSubpass(VkCommandBuffer commandBuffer,
531
532 const VkSubpassBeginInfo *pSubpassBeginInfo, const VkSubpassEndInfo *pSubpassEndInfo,
533 CMD_TYPE command);
534 void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command);
John Zulauf9cb530d2019-09-30 14:14:10 -0600535
536 void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
537 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result);
538
John Zulauf355e49b2020-04-24 15:11:15 -0600539 bool ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
540 const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const char *func_name) const;
541
542 bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
543 VkSubpassContents contents) const;
544
545 bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
546 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const;
547
548 bool PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
549 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const;
550
John Zulauf9cb530d2019-09-30 14:14:10 -0600551 bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
552 const VkBufferCopy *pRegions) const;
553
554 void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
555 const VkBufferCopy *pRegions);
556
John Zulauf5c5e88d2019-12-26 11:22:02 -0700557 bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
558 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
559 const VkImageCopy *pRegions) const;
560
561 void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
562 VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions);
563
John Zulauf9cb530d2019-09-30 14:14:10 -0600564 bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
565 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
566 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
567 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
568 uint32_t imageMemoryBarrierCount,
569 const VkImageMemoryBarrier *pImageMemoryBarriers) const;
570
571 void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
572 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
573 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
574 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
575 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers);
John Zulauf3d84f1b2020-03-09 13:33:25 -0600576
577 void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
578 VkResult result);
579
580 void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
581 VkSubpassContents contents);
582 void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
583 const VkSubpassBeginInfo *pSubpassBeginInfo);
584 void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
585 const VkSubpassBeginInfo *pSubpassBeginInfo);
586
John Zulauf355e49b2020-04-24 15:11:15 -0600587 bool ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
588 const VkSubpassEndInfoKHR *pSubpassEndInfo, const char *func_name) const;
589 bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const;
590 bool PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
591 const VkSubpassEndInfoKHR *pSubpassEndInfo) const;
592 bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
593 const VkSubpassEndInfoKHR *pSubpassEndInfo) const;
594
John Zulauf3d84f1b2020-03-09 13:33:25 -0600595 void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
596 void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
597 const VkSubpassEndInfo *pSubpassEndInfo);
598 void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
599 const VkSubpassEndInfo *pSubpassEndInfo);
600
John Zulauf355e49b2020-04-24 15:11:15 -0600601 bool ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo,
602 const char *func_name) const;
603 bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const;
604 bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) const;
605 bool PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) const;
606
John Zulauf3d84f1b2020-03-09 13:33:25 -0600607 void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer);
608 void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);
609 void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo);
locke-lunarga19c71d2020-03-02 18:17:04 -0700610 bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
611 VkImageLayout dstImageLayout, uint32_t regionCount,
612 const VkBufferImageCopy *pRegions) const;
613
614 void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
615 VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions);
616
617 bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
618 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) const;
619
620 void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
621 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions);
622
623 bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
624 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
625 const VkImageBlit *pRegions, VkFilter filter) const;
626
627 void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
628 VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions,
629 VkFilter filter);
locke-lunarg36ba2592020-04-03 09:42:04 -0600630
locke-lunarg61870c22020-06-09 14:51:50 -0600631 bool ValidateIndirectBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, const VkDeviceSize struct_size,
632 const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, const uint32_t stride,
633 const char *function) const;
634 void RecordIndirectBuffer(AccessContext &context, const ResourceUsageTag &tag, const VkDeviceSize struct_size,
635 const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount, uint32_t stride);
locke-lunarg36ba2592020-04-03 09:42:04 -0600636
locke-lunarg61870c22020-06-09 14:51:50 -0600637 bool ValidateCountBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
638 const char *function) const;
639 void RecordCountBuffer(AccessContext &context, const ResourceUsageTag &tag, VkBuffer buffer, VkDeviceSize offset);
locke-lunarg93d68af2020-05-12 17:18:03 -0600640
locke-lunarg36ba2592020-04-03 09:42:04 -0600641 bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const;
642 void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z);
locke-lunarge1a67022020-04-29 00:15:36 -0600643
644 bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const;
645 void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
646
647 bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
648 uint32_t firstInstance) const;
649 void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
650 uint32_t firstInstance);
651
652 bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
653 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const;
654 void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
655 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
656
657 bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
658 uint32_t stride) const;
659 void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
660 uint32_t stride);
661
662 bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
663 uint32_t drawCount, uint32_t stride) const;
664 void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
665 uint32_t drawCount, uint32_t stride);
666
locke-lunargff255f92020-05-13 18:53:52 -0600667 bool ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer,
668 VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride,
669 const char *function) const;
locke-lunarge1a67022020-04-29 00:15:36 -0600670 bool PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
671 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
672 uint32_t stride) const;
673 void PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
674 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
675 uint32_t stride);
676 bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
677 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
678 uint32_t stride) const;
679 void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
680 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
681 uint32_t stride);
682 bool PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
683 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
684 uint32_t stride) const;
685 void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
686 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
687 uint32_t stride);
688
locke-lunargff255f92020-05-13 18:53:52 -0600689 bool ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
690 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
691 uint32_t stride, const char *function) const;
locke-lunarge1a67022020-04-29 00:15:36 -0600692 bool PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
693 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
694 uint32_t stride) const;
695 void PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
696 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
697 uint32_t stride);
698 bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
699 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
700 uint32_t stride) const;
701 void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
702 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
703 uint32_t stride);
704 bool PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
705 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
706 uint32_t stride) const;
707 void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
708 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
709 uint32_t stride);
710
711 bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
712 const VkClearColorValue *pColor, uint32_t rangeCount,
713 const VkImageSubresourceRange *pRanges) const;
714 void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
715 const VkClearColorValue *pColor, uint32_t rangeCount,
716 const VkImageSubresourceRange *pRanges);
717
718 bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
719 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
720 const VkImageSubresourceRange *pRanges) const;
721 void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
722 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
723 const VkImageSubresourceRange *pRanges);
724
725 bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
726 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
727 VkDeviceSize stride, VkQueryResultFlags flags) const;
728 void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
729 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
730 VkQueryResultFlags flags);
731
732 bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
733 uint32_t data) const;
734 void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
735 uint32_t data);
736
737 bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
738 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
739 const VkImageResolve *pRegions) const;
740 void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
741 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
742 const VkImageResolve *pRegions);
743
744 bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
745 VkDeviceSize dataSize, const void *pData) const;
746 void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
747 VkDeviceSize dataSize, const void *pData);
locke-lunargff255f92020-05-13 18:53:52 -0600748
749 bool PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
750 VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const;
751 void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
752 VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker);
John Zulauf9cb530d2019-09-30 14:14:10 -0600753};