locke-lunarg8ec19162020-06-16 18:48:34 -06001/* Copyright (c) 2019-2020 The Khronos Group Inc.
2 * Copyright (c) 2019-2020 Valve Corporation
3 * Copyright (c) 2019-2020 LunarG, Inc.
John Zulauf9cb530d2019-09-30 14:14:10 -06004 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 * http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 *
17 * Author: John Zulauf <jzulauf@lunarg.com>
18 */
19
20#include <limits>
21#include <vector>
locke-lunarg296a3c92020-03-25 01:04:29 -060022#include <memory>
23#include <bitset>
John Zulauf9cb530d2019-09-30 14:14:10 -060024#include "synchronization_validation.h"
25
26static const char *string_SyncHazardVUID(SyncHazard hazard) {
27 switch (hazard) {
28 case SyncHazard::NONE:
John Zulauf2f952d22020-02-10 11:34:51 -070029 return "SYNC-HAZARD-NONE";
John Zulauf9cb530d2019-09-30 14:14:10 -060030 break;
31 case SyncHazard::READ_AFTER_WRITE:
32 return "SYNC-HAZARD-READ_AFTER_WRITE";
33 break;
34 case SyncHazard::WRITE_AFTER_READ:
35 return "SYNC-HAZARD-WRITE_AFTER_READ";
36 break;
37 case SyncHazard::WRITE_AFTER_WRITE:
38 return "SYNC-HAZARD-WRITE_AFTER_WRITE";
39 break;
John Zulauf2f952d22020-02-10 11:34:51 -070040 case SyncHazard::READ_RACING_WRITE:
41 return "SYNC-HAZARD-READ-RACING-WRITE";
42 break;
43 case SyncHazard::WRITE_RACING_WRITE:
44 return "SYNC-HAZARD-WRITE-RACING-WRITE";
45 break;
46 case SyncHazard::WRITE_RACING_READ:
47 return "SYNC-HAZARD-WRITE-RACING-READ";
48 break;
John Zulauf9cb530d2019-09-30 14:14:10 -060049 default:
50 assert(0);
51 }
52 return "SYNC-HAZARD-INVALID";
53}
54
55static const char *string_SyncHazard(SyncHazard hazard) {
56 switch (hazard) {
57 case SyncHazard::NONE:
58 return "NONR";
59 break;
60 case SyncHazard::READ_AFTER_WRITE:
61 return "READ_AFTER_WRITE";
62 break;
63 case SyncHazard::WRITE_AFTER_READ:
64 return "WRITE_AFTER_READ";
65 break;
66 case SyncHazard::WRITE_AFTER_WRITE:
67 return "WRITE_AFTER_WRITE";
68 break;
John Zulauf2f952d22020-02-10 11:34:51 -070069 case SyncHazard::READ_RACING_WRITE:
70 return "READ_RACING_WRITE";
71 break;
72 case SyncHazard::WRITE_RACING_WRITE:
73 return "WRITE_RACING_WRITE";
74 break;
75 case SyncHazard::WRITE_RACING_READ:
76 return "WRITE_RACING_READ";
77 break;
John Zulauf9cb530d2019-09-30 14:14:10 -060078 default:
79 assert(0);
80 }
81 return "INVALID HAZARD";
82}
83
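// Format a usage tag for error messages; the low 32 bits of the tag index are the command sequence number and the
// high 32 bits are the command buffer reset count.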
John Zulauf1dae9192020-06-16 15:46:44 -060084static std::string string_UsageTag(const ResourceUsageTag &tag) {
85 std::stringstream out;
John Zulaufcc6fecb2020-06-17 15:24:54 -060086 out << "(command " << CommandTypeString(tag.command) << ", seq #" << (tag.index & 0xFFFFFFFF) << ", reset #"
87 << (tag.index >> 32) << ")";
John Zulauf1dae9192020-06-16 15:46:44 -060088 return out.str();
89}
90
John Zulaufb027cdb2020-05-21 14:25:22 -060091static constexpr VkPipelineStageFlags kColorAttachmentExecScope = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
92static constexpr SyncStageAccessFlags kColorAttachmentAccessScope =
93 SyncStageAccessFlagBits::SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ_BIT |
94 SyncStageAccessFlagBits::SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT |
95 SyncStageAccessFlagBits::SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE_BIT;
96static constexpr VkPipelineStageFlags kDepthStencilAttachmentExecScope =
97 VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
98static constexpr SyncStageAccessFlags kDepthStencilAttachmentAccessScope =
99 SyncStageAccessFlagBits::SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
100 SyncStageAccessFlagBits::SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
101 SyncStageAccessFlagBits::SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
102 SyncStageAccessFlagBits::SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
103
104static constexpr SyncOrderingBarrier kColorAttachmentRasterOrder = {kColorAttachmentExecScope, kColorAttachmentAccessScope};
105static constexpr SyncOrderingBarrier kDepthStencilAttachmentRasterOrder = {kDepthStencilAttachmentExecScope,
106 kDepthStencilAttachmentAccessScope};
107static constexpr SyncOrderingBarrier kAttachmentRasterOrder = {kDepthStencilAttachmentExecScope | kColorAttachmentExecScope,
108 kDepthStencilAttachmentAccessScope | kColorAttachmentAccessScope};
John Zulauf7635de32020-05-29 17:14:15 -0600109// Sometimes we have an internal access conflict, and we use the kCurrentCommandTag to set and detect accesses in temporary/proxy contexts
John Zulaufcc6fecb2020-06-17 15:24:54 -0600110static const ResourceUsageTag kCurrentCommandTag(ResourceUsageTag::kMaxIndex, CMD_NONE);
John Zulaufb027cdb2020-05-21 14:25:22 -0600111
locke-lunarg3c038002020-04-30 23:08:08 -0600112inline VkDeviceSize GetRealWholeSize(VkDeviceSize offset, VkDeviceSize size, VkDeviceSize whole_size) {
113 if (size == VK_WHOLE_SIZE) {
114 return (whole_size - offset);
115 }
116 return size;
117}
118
John Zulauf16adfc92020-04-08 10:28:33 -0600119template <typename T>
John Zulauf355e49b2020-04-24 15:11:15 -0600120static ResourceAccessRange MakeRange(const T &has_offset_and_size) {
John Zulauf16adfc92020-04-08 10:28:33 -0600121 return ResourceAccessRange(has_offset_and_size.offset, (has_offset_and_size.offset + has_offset_and_size.size));
122}
123
John Zulauf355e49b2020-04-24 15:11:15 -0600124static ResourceAccessRange MakeRange(VkDeviceSize start, VkDeviceSize size) { return ResourceAccessRange(start, (start + size)); }
John Zulauf16adfc92020-04-08 10:28:33 -0600125
John Zulauf0cb5be22020-01-23 12:18:22 -0700126// Expand the pipeline stages without regard to whether they are valid w.r.t. queue or extension
127VkPipelineStageFlags ExpandPipelineStages(VkQueueFlags queue_flags, VkPipelineStageFlags stage_mask) {
128 VkPipelineStageFlags expanded = stage_mask;
129 if (VK_PIPELINE_STAGE_ALL_COMMANDS_BIT & stage_mask) {
130 expanded = expanded & ~VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
131 for (const auto &all_commands : syncAllCommandStagesByQueueFlags) {
132 if (all_commands.first & queue_flags) {
133 expanded |= all_commands.second;
134 }
135 }
136 }
137 if (VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT & stage_mask) {
138 expanded = expanded & ~VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
139 expanded |= syncAllCommandStagesByQueueFlags.at(VK_QUEUE_GRAPHICS_BIT) & ~VK_PIPELINE_STAGE_HOST_BIT;
140 }
141 return expanded;
142}
143
John Zulauf36bcf6a2020-02-03 15:12:52 -0700144VkPipelineStageFlags RelatedPipelineStages(VkPipelineStageFlags stage_mask,
145 std::map<VkPipelineStageFlagBits, VkPipelineStageFlags> &map) {
146 VkPipelineStageFlags unscanned = stage_mask;
147 VkPipelineStageFlags related = 0;
148 for (const auto entry : map) {
149 const auto stage = entry.first;
150 if (stage & unscanned) {
151 related = related | entry.second;
152 unscanned = unscanned & ~stage;
153 if (!unscanned) break;
154 }
155 }
156 return related;
157}
158
159VkPipelineStageFlags WithEarlierPipelineStages(VkPipelineStageFlags stage_mask) {
160 return stage_mask | RelatedPipelineStages(stage_mask, syncLogicallyEarlierStages);
161}
162
163VkPipelineStageFlags WithLaterPipelineStages(VkPipelineStageFlags stage_mask) {
164 return stage_mask | RelatedPipelineStages(stage_mask, syncLogicallyLaterStages);
165}
166
John Zulauf5c5e88d2019-12-26 11:22:02 -0700167static const ResourceAccessRange full_range(std::numeric_limits<VkDeviceSize>::min(), std::numeric_limits<VkDeviceSize>::max());
John Zulauf5c5e88d2019-12-26 11:22:02 -0700168
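// Compute the byte range covered by `count` elements of `stride` bytes starting at `first_index`; a count of
// UINT32_MAX means "through the end of the buffer".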
locke-lunargff255f92020-05-13 18:53:52 -0600169void GetBufferRange(VkDeviceSize &range_start, VkDeviceSize &range_size, VkDeviceSize offset, VkDeviceSize buf_whole_size,
170 uint32_t first_index, uint32_t count, VkDeviceSize stride) {
171 range_start = offset + first_index * stride;
172 range_size = 0;
173 if (count == UINT32_MAX) {
174 range_size = buf_whole_size - range_start;
175 } else {
176 range_size = count * stride;
177 }
178}
179
locke-lunarg654e3692020-06-04 17:19:15 -0600180SyncStageAccessIndex GetSyncStageAccessIndexsByDescriptorSet(VkDescriptorType descriptor_type, const interface_var &descriptor_data,
181 VkShaderStageFlagBits stage_flag) {
182 if (descriptor_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) {
183 assert(stage_flag == VK_SHADER_STAGE_FRAGMENT_BIT);
184 return SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ;
185 }
186 auto stage_access = syncStageAccessMaskByShaderStage.find(stage_flag);
187 if (stage_access == syncStageAccessMaskByShaderStage.end()) {
188 assert(0);
189 }
190 if (descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
191 return stage_access->second.uniform_read;
192 }
193
 194 // If the descriptorSet is writable, we don't need to care about SHADER_READ; SHADER_WRITE is enough.
 195 // If a write hazard happens, a read hazard may or may not happen.
 196 // But if a write hazard doesn't happen, a read hazard cannot happen.
197 if (descriptor_data.is_writable) {
198 return stage_access->second.shader_write;
199 }
200 return stage_access->second.shader_read;
201}
202
locke-lunarg37047832020-06-12 13:44:45 -0600203bool IsImageLayoutDepthWritable(VkImageLayout image_layout) {
204 return (image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
205 image_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
206 image_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL)
207 ? true
208 : false;
209}
210
211bool IsImageLayoutStencilWritable(VkImageLayout image_layout) {
212 return (image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
213 image_layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL ||
214 image_layout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL)
215 ? true
216 : false;
217}
218
John Zulauf355e49b2020-04-24 15:11:15 -0600219// Class AccessContext stores the state of accesses specific to a Command, Subpass, or Queue
220const std::array<AccessContext::AddressType, AccessContext::kAddressTypeCount> AccessContext::kAddressTypes = {
221 AccessContext::AddressType::kLinearAddress, AccessContext::AddressType::kIdealizedAddress};
222
John Zulauf7635de32020-05-29 17:14:15 -0600223// Traverse the attachment resolves for a specific subpass, and apply action() to them.
224// Used by both validation and record operations
225//
 226// The signature for Action() reflects the needs of both uses.
227template <typename Action>
228void ResolveOperation(Action &action, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
229 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass) {
230 VkExtent3D extent = CastTo3D(render_area.extent);
231 VkOffset3D offset = CastTo3D(render_area.offset);
232 const auto &rp_ci = rp_state.createInfo;
233 const auto *attachment_ci = rp_ci.pAttachments;
234 const auto &subpass_ci = rp_ci.pSubpasses[subpass];
235
 236 // Color resolves -- require an in-use color attachment and a matching in-use resolve attachment
237 const auto *color_attachments = subpass_ci.pColorAttachments;
238 const auto *color_resolve = subpass_ci.pResolveAttachments;
239 if (color_resolve && color_attachments) {
240 for (uint32_t i = 0; i < subpass_ci.colorAttachmentCount; i++) {
241 const auto &color_attach = color_attachments[i].attachment;
242 const auto &resolve_attach = subpass_ci.pResolveAttachments[i].attachment;
243 if ((color_attach != VK_ATTACHMENT_UNUSED) && (resolve_attach != VK_ATTACHMENT_UNUSED)) {
244 action("color", "resolve read", color_attach, resolve_attach, attachment_views[color_attach],
245 SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ, kColorAttachmentRasterOrder, offset, extent, 0);
246 action("color", "resolve write", color_attach, resolve_attach, attachment_views[resolve_attach],
247 SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, kColorAttachmentRasterOrder, offset, extent, 0);
248 }
249 }
250 }
251
252 // Depth stencil resolve only if the extension is present
253 const auto ds_resolve = lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolve>(subpass_ci.pNext);
254 if (ds_resolve && ds_resolve->pDepthStencilResolveAttachment &&
255 (ds_resolve->pDepthStencilResolveAttachment->attachment != VK_ATTACHMENT_UNUSED) && subpass_ci.pDepthStencilAttachment &&
256 (subpass_ci.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)) {
257 const auto src_at = subpass_ci.pDepthStencilAttachment->attachment;
258 const auto src_ci = attachment_ci[src_at];
259 // The formats are required to match so we can pick either
260 const bool resolve_depth = (ds_resolve->depthResolveMode != VK_RESOLVE_MODE_NONE) && FormatHasDepth(src_ci.format);
261 const bool resolve_stencil = (ds_resolve->stencilResolveMode != VK_RESOLVE_MODE_NONE) && FormatHasStencil(src_ci.format);
262 const auto dst_at = ds_resolve->pDepthStencilResolveAttachment->attachment;
263 VkImageAspectFlags aspect_mask = 0u;
264
265 // Figure out which aspects are actually touched during resolve operations
266 const char *aspect_string = nullptr;
267 if (resolve_depth && resolve_stencil) {
268 // Validate all aspects together
269 aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
270 aspect_string = "depth/stencil";
271 } else if (resolve_depth) {
272 // Validate depth only
273 aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT;
274 aspect_string = "depth";
275 } else if (resolve_stencil) {
 276 // Validate stencil only
277 aspect_mask = VK_IMAGE_ASPECT_STENCIL_BIT;
278 aspect_string = "stencil";
279 }
280
281 if (aspect_mask) {
282 action(aspect_string, "resolve read", src_at, dst_at, attachment_views[src_at],
283 SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ, kDepthStencilAttachmentRasterOrder, offset, extent,
284 aspect_mask);
285 action(aspect_string, "resolve write", src_at, dst_at, attachment_views[dst_at],
286 SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, kAttachmentRasterOrder, offset, extent, aspect_mask);
287 }
288 }
289}
290
291// Action for validating resolve operations
292class ValidateResolveAction {
293 public:
294 ValidateResolveAction(VkRenderPass render_pass, uint32_t subpass, const AccessContext &context, const SyncValidator &sync_state,
295 const char *func_name)
296 : render_pass_(render_pass),
297 subpass_(subpass),
298 context_(context),
299 sync_state_(sync_state),
300 func_name_(func_name),
301 skip_(false) {}
302 void operator()(const char *aspect_name, const char *attachment_name, uint32_t src_at, uint32_t dst_at,
303 const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
304 const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask) {
305 HazardResult hazard;
306 hazard = context_.DetectHazard(view, current_usage, ordering, offset, extent, aspect_mask);
307 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -0600308 skip_ |= sync_state_.LogError(render_pass_, string_SyncHazardVUID(hazard.hazard),
309 "%s: Hazard %s in subpass %" PRIu32 "during %s %s, from attachment %" PRIu32
310 " to resolve attachment %" PRIu32 ". Prior access %s.",
311 func_name_, string_SyncHazard(hazard.hazard), subpass_, aspect_name, attachment_name,
312 src_at, dst_at, string_UsageTag(hazard.tag).c_str());
John Zulauf7635de32020-05-29 17:14:15 -0600313 }
314 }
315 // Providing a mechanism for the constructing caller to get the result of the validation
316 bool GetSkip() const { return skip_; }
317
318 private:
319 VkRenderPass render_pass_;
320 const uint32_t subpass_;
321 const AccessContext &context_;
322 const SyncValidator &sync_state_;
323 const char *func_name_;
324 bool skip_;
325};
326
327// Update action for resolve operations
328class UpdateStateResolveAction {
329 public:
330 UpdateStateResolveAction(AccessContext &context, const ResourceUsageTag &tag) : context_(context), tag_(tag) {}
331 void operator()(const char *aspect_name, const char *attachment_name, uint32_t src_at, uint32_t dst_at,
332 const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
333 const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask) {
334 // Ignores validation only arguments...
335 context_.UpdateAccessState(view, current_usage, offset, extent, aspect_mask, tag_);
336 }
337
338 private:
339 AccessContext &context_;
340 const ResourceUsageTag &tag_;
341};
342
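// Construct the access context for a subpass, wiring up the previous-subpass contexts (with their barriers),
// the asynchronous subpasses, and the external from/to barriers from the subpass dependency graph.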
John Zulauf540266b2020-04-06 18:54:53 -0600343AccessContext::AccessContext(uint32_t subpass, VkQueueFlags queue_flags,
344 const std::vector<SubpassDependencyGraphNode> &dependencies,
345 const std::vector<AccessContext> &contexts, AccessContext *external_context) {
John Zulauf3d84f1b2020-03-09 13:33:25 -0600346 Reset();
347 const auto &subpass_dep = dependencies[subpass];
348 prev_.reserve(subpass_dep.prev.size());
John Zulauf355e49b2020-04-24 15:11:15 -0600349 prev_by_subpass_.resize(subpass, nullptr); // Can't be more prevs than the subpass we're on
John Zulauf3d84f1b2020-03-09 13:33:25 -0600350 for (const auto &prev_dep : subpass_dep.prev) {
351 assert(prev_dep.dependency);
352 const auto dep = *prev_dep.dependency;
John Zulauf540266b2020-04-06 18:54:53 -0600353 prev_.emplace_back(const_cast<AccessContext *>(&contexts[dep.srcSubpass]), queue_flags, dep);
John Zulauf355e49b2020-04-24 15:11:15 -0600354 prev_by_subpass_[dep.srcSubpass] = &prev_.back();
John Zulauf5c5e88d2019-12-26 11:22:02 -0700355 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600356
357 async_.reserve(subpass_dep.async.size());
358 for (const auto async_subpass : subpass_dep.async) {
John Zulauf540266b2020-04-06 18:54:53 -0600359 async_.emplace_back(const_cast<AccessContext *>(&contexts[async_subpass]));
John Zulauf3d84f1b2020-03-09 13:33:25 -0600360 }
John Zulaufe5da6e52020-03-18 15:32:18 -0600361 if (subpass_dep.barrier_from_external) {
362 src_external_ = TrackBack(external_context, queue_flags, *subpass_dep.barrier_from_external);
363 } else {
364 src_external_ = TrackBack();
365 }
366 if (subpass_dep.barrier_to_external) {
367 dst_external_ = TrackBack(this, queue_flags, *subpass_dep.barrier_to_external);
368 } else {
369 dst_external_ = TrackBack();
John Zulauf3d84f1b2020-03-09 13:33:25 -0600370 }
John Zulauf5c5e88d2019-12-26 11:22:02 -0700371}
372
John Zulauf5f13a792020-03-10 07:31:21 -0600373template <typename Detector>
John Zulauf16adfc92020-04-08 10:28:33 -0600374HazardResult AccessContext::DetectPreviousHazard(AddressType type, const Detector &detector,
John Zulauf540266b2020-04-06 18:54:53 -0600375 const ResourceAccessRange &range) const {
John Zulauf5f13a792020-03-10 07:31:21 -0600376 ResourceAccessRangeMap descent_map;
John Zulauf69133422020-05-20 14:55:53 -0600377 ResolvePreviousAccess(type, range, &descent_map, nullptr);
John Zulauf5f13a792020-03-10 07:31:21 -0600378
379 HazardResult hazard;
380 for (auto prev = descent_map.begin(); prev != descent_map.end() && !hazard.hazard; ++prev) {
381 hazard = detector.Detect(prev);
382 }
383 return hazard;
384}
385
John Zulauf3d84f1b2020-03-09 13:33:25 -0600386// A recursive range walker for hazard detection: first the current context, then (via DetectPreviousHazard) the DAG of
 387// prior contexts (for example subpasses)
388template <typename Detector>
John Zulauf355e49b2020-04-24 15:11:15 -0600389HazardResult AccessContext::DetectHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range,
390 DetectOptions options) const {
John Zulauf3d84f1b2020-03-09 13:33:25 -0600391 HazardResult hazard;
John Zulauf5f13a792020-03-10 07:31:21 -0600392
John Zulauf355e49b2020-04-24 15:11:15 -0600393 if (static_cast<uint32_t>(options) & DetectOptions::kDetectAsync) {
394 // Async checks don't require recursive lookups, as the async lists are exhaustive for the top-level context
395 // so we'll check these first
396 for (const auto &async_context : async_) {
397 hazard = async_context->DetectAsyncHazard(type, detector, range);
398 if (hazard.hazard) return hazard;
399 }
John Zulauf5f13a792020-03-10 07:31:21 -0600400 }
401
John Zulauf69133422020-05-20 14:55:53 -0600402 const bool detect_prev = (static_cast<uint32_t>(options) & DetectOptions::kDetectPrevious) != 0;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600403
John Zulauf69133422020-05-20 14:55:53 -0600404 const auto &accesses = GetAccessStateMap(type);
405 const auto from = accesses.lower_bound(range);
406 const auto to = accesses.upper_bound(range);
407 ResourceAccessRange gap = {range.begin, range.begin};
John Zulauf5f13a792020-03-10 07:31:21 -0600408
John Zulauf69133422020-05-20 14:55:53 -0600409 for (auto pos = from; pos != to; ++pos) {
410 // Cover any leading gap, or gap between entries
411 if (detect_prev) {
412 // TODO: After profiling we may want to change the descent logic such that we don't recur per gap...
413 // Cover any leading gap, or gap between entries
414 gap.end = pos->first.begin; // We know this begin is < range.end
John Zulauf355e49b2020-04-24 15:11:15 -0600415 if (gap.non_empty()) {
John Zulauf69133422020-05-20 14:55:53 -0600416 // Recur on all gaps
John Zulauf16adfc92020-04-08 10:28:33 -0600417 hazard = DetectPreviousHazard(type, detector, gap);
John Zulauf5f13a792020-03-10 07:31:21 -0600418 if (hazard.hazard) return hazard;
419 }
John Zulauf69133422020-05-20 14:55:53 -0600420 // Set up for the next gap. If pos..end is >= range.end, loop will exit, and trailing gap will be empty
421 gap.begin = pos->first.end;
422 }
423
424 hazard = detector.Detect(pos);
425 if (hazard.hazard) return hazard;
426 }
427
428 if (detect_prev) {
429 // Detect in the trailing empty as needed
430 gap.end = range.end;
431 if (gap.non_empty()) {
432 hazard = DetectPreviousHazard(type, detector, gap);
John Zulauf16adfc92020-04-08 10:28:33 -0600433 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600434 }
435
436 return hazard;
437}
438
439// A non recursive range walker for the asynchronous contexts (those we have no barriers with)
440template <typename Detector>
John Zulauf355e49b2020-04-24 15:11:15 -0600441HazardResult AccessContext::DetectAsyncHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const {
John Zulauf16adfc92020-04-08 10:28:33 -0600442 auto &accesses = GetAccessStateMap(type);
443 const auto from = accesses.lower_bound(range);
444 const auto to = accesses.upper_bound(range);
445
John Zulauf3d84f1b2020-03-09 13:33:25 -0600446 HazardResult hazard;
John Zulauf16adfc92020-04-08 10:28:33 -0600447 for (auto pos = from; pos != to && !hazard.hazard; ++pos) {
448 hazard = detector.DetectAsync(pos);
John Zulauf3d84f1b2020-03-09 13:33:25 -0600449 }
John Zulauf16adfc92020-04-08 10:28:33 -0600450
John Zulauf3d84f1b2020-03-09 13:33:25 -0600451 return hazard;
452}
453
John Zulauf355e49b2020-04-24 15:11:15 -0600454// Resolve the source entries [first, last) into the destination entry, splitting it and applying the barrier as needed
455static void ResolveMapToEntry(ResourceAccessRangeMap *dest, ResourceAccessRangeMap::iterator entry,
456 ResourceAccessRangeMap::const_iterator first, ResourceAccessRangeMap::const_iterator last,
457 const SyncBarrier *barrier) {
458 auto at = entry;
459 for (auto pos = first; pos != last; ++pos) {
460 // Every member of the input iterator range must fit within the remaining portion of entry
461 assert(at->first.includes(pos->first));
462 assert(at != dest->end());
463 // Trim up at to the same size as the entry to resolve
464 at = sparse_container::split(at, *dest, pos->first);
465 auto access = pos->second;
466 if (barrier) {
467 access.ApplyBarrier(*barrier);
468 }
469 at->second.Resolve(access);
470 ++at; // Go to the remaining unused section of entry
471 }
472}
473
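// Merge this context's access state over `range` into *resolve_map, applying `barrier` to the source state; gaps in
// this context are optionally filled by recursing into previous contexts (or with infill_state).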
474void AccessContext::ResolveAccessRange(AddressType type, const ResourceAccessRange &range, const SyncBarrier *barrier,
475 ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
476 bool recur_to_infill) const {
John Zulauf3bcab5e2020-06-19 14:42:32 -0600477 if (!range.non_empty()) return;
478
John Zulauf355e49b2020-04-24 15:11:15 -0600479 ResourceRangeMergeIterator current(*resolve_map, GetAccessStateMap(type), range.begin);
480 while (current->range.non_empty() && range.includes(current->range.begin)) {
John Zulauf3bcab5e2020-06-19 14:42:32 -0600481 const auto current_range = current->range & range;
John Zulauf16adfc92020-04-08 10:28:33 -0600482 if (current->pos_B->valid) {
483 const auto &src_pos = current->pos_B->lower_bound;
John Zulauf355e49b2020-04-24 15:11:15 -0600484 auto access = src_pos->second;
485 if (barrier) {
486 access.ApplyBarrier(*barrier);
487 }
John Zulauf16adfc92020-04-08 10:28:33 -0600488 if (current->pos_A->valid) {
John Zulauf3bcab5e2020-06-19 14:42:32 -0600489 const auto trimmed = sparse_container::split(current->pos_A->lower_bound, *resolve_map, current_range);
490 trimmed->second.Resolve(access);
491 current.invalidate_A(trimmed);
John Zulauf5f13a792020-03-10 07:31:21 -0600492 } else {
John Zulauf3bcab5e2020-06-19 14:42:32 -0600493 auto inserted = resolve_map->insert(current->pos_A->lower_bound, std::make_pair(current_range, access));
John Zulauf355e49b2020-04-24 15:11:15 -0600494 current.invalidate_A(inserted); // Update the parallel iterator to point at the insert segment
John Zulauf5f13a792020-03-10 07:31:21 -0600495 }
John Zulauf16adfc92020-04-08 10:28:33 -0600496 } else {
497 // we have to descend to fill this gap
498 if (recur_to_infill) {
John Zulauf355e49b2020-04-24 15:11:15 -0600499 if (current->pos_A->valid) {
500 // Dest is valid, so we need to accumulate along the DAG and then resolve... in an N-to-1 resolve operation
501 ResourceAccessRangeMap gap_map;
John Zulauf3bcab5e2020-06-19 14:42:32 -0600502 ResolvePreviousAccess(type, current_range, &gap_map, infill_state);
John Zulauf355e49b2020-04-24 15:11:15 -0600503 ResolveMapToEntry(resolve_map, current->pos_A->lower_bound, gap_map.begin(), gap_map.end(), barrier);
504 } else {
John Zulauf3bcab5e2020-06-19 14:42:32 -0600505 // There isn't anything in dest in current_range, so we can accumulate directly into it.
506 ResolvePreviousAccess(type, current_range, resolve_map, infill_state);
John Zulauf355e49b2020-04-24 15:11:15 -0600507 if (barrier) {
508 // Need to apply the barrier to the accesses we accumulated, noting that we haven't updated current
John Zulauf3bcab5e2020-06-19 14:42:32 -0600509 for (auto pos = resolve_map->lower_bound(current_range); pos != current->pos_A->lower_bound; ++pos) {
John Zulauf355e49b2020-04-24 15:11:15 -0600510 pos->second.ApplyBarrier(*barrier);
511 }
512 }
513 }
514 // Given that there could be gaps we need to seek carefully to not repeatedly search the same gaps in the next
515 // iterator of the outer while.
516
517 // Set the parallel iterator to the end of this range s.t. ++ will move us to the next range whether or
518 // not the end of the range is a gap. For the seek to work, first we need to warn the parallel iterator
519 // we stepped on the dest map
locke-lunarg88dbb542020-06-23 22:05:42 -0600520 const auto seek_to = current_range.end - 1; // The subtraction is safe as range can't be empty (loop condition)
521 current.invalidate_A(); // Changes current->range
John Zulauf355e49b2020-04-24 15:11:15 -0600522 current.seek(seek_to);
523 } else if (!current->pos_A->valid && infill_state) {
 524 // If we didn't find anything in the current range, and we aren't recurring... we infill if required
525 auto inserted = resolve_map->insert(current->pos_A->lower_bound, std::make_pair(current->range, *infill_state));
526 current.invalidate_A(inserted); // Update the parallel iterator to point at the correct segment after insert
John Zulauf16adfc92020-04-08 10:28:33 -0600527 }
John Zulauf5f13a792020-03-10 07:31:21 -0600528 }
John Zulauf16adfc92020-04-08 10:28:33 -0600529 ++current;
John Zulauf3d84f1b2020-03-09 13:33:25 -0600530 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600531}
532
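// Accumulate the access state for `range` from all previous contexts into *descent_map, applying each track-back
// barrier; if there are no previous contexts, infill_state (when provided) is used instead.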
John Zulauf355e49b2020-04-24 15:11:15 -0600533void AccessContext::ResolvePreviousAccess(AddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
534 const ResourceAccessState *infill_state) const {
John Zulaufe5da6e52020-03-18 15:32:18 -0600535 if ((prev_.size() == 0) && (src_external_.context == nullptr)) {
John Zulauf5f13a792020-03-10 07:31:21 -0600536 if (range.non_empty() && infill_state) {
537 descent_map->insert(std::make_pair(range, *infill_state));
538 }
539 } else {
540 // Look for something to fill the gap further along.
541 for (const auto &prev_dep : prev_) {
John Zulauf355e49b2020-04-24 15:11:15 -0600542 prev_dep.context->ResolveAccessRange(type, range, &prev_dep.barrier, descent_map, infill_state);
John Zulauf5f13a792020-03-10 07:31:21 -0600543 }
544
John Zulaufe5da6e52020-03-18 15:32:18 -0600545 if (src_external_.context) {
John Zulauf355e49b2020-04-24 15:11:15 -0600546 src_external_.context->ResolveAccessRange(type, range, &src_external_.barrier, descent_map, infill_state);
John Zulauf5f13a792020-03-10 07:31:21 -0600547 }
548 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600549}
550
John Zulauf16adfc92020-04-08 10:28:33 -0600551AccessContext::AddressType AccessContext::ImageAddressType(const IMAGE_STATE &image) {
locke-lunarg3f6978b2020-04-16 16:51:35 -0600552 return (image.fragment_encoder->IsLinearImage()) ? AddressType::kLinearAddress : AddressType::kIdealizedAddress;
John Zulauf16adfc92020-04-08 10:28:33 -0600553}
554
555VkDeviceSize AccessContext::ResourceBaseAddress(const BINDABLE &bindable) {
556 return bindable.binding.offset + bindable.binding.mem_state->fake_base_address;
557}
558
John Zulauf355e49b2020-04-24 15:11:15 -0600559static bool SimpleBinding(const BINDABLE &bindable) { return !bindable.sparse && bindable.binding.mem_state; }
John Zulauf16adfc92020-04-08 10:28:33 -0600560
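// Map attachment load ops to stage/access usage: LOAD is a read of the attachment, while CLEAR and DONT_CARE are
// treated as writes.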
John Zulauf1507ee42020-05-18 11:33:09 -0600561static SyncStageAccessIndex ColorLoadUsage(VkAttachmentLoadOp load_op) {
562 const auto stage_access = (load_op == VK_ATTACHMENT_LOAD_OP_LOAD) ? SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ
563 : SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE;
564 return stage_access;
565}
566static SyncStageAccessIndex DepthStencilLoadUsage(VkAttachmentLoadOp load_op) {
567 const auto stage_access = (load_op == VK_ATTACHMENT_LOAD_OP_LOAD) ? SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ
568 : SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE;
569 return stage_access;
570}
571
John Zulauf7635de32020-05-29 17:14:15 -0600572// Caller must manage returned pointer
573static AccessContext *CreateStoreResolveProxyContext(const AccessContext &context, const RENDER_PASS_STATE &rp_state,
574 uint32_t subpass, const VkRect2D &render_area,
575 std::vector<const IMAGE_VIEW_STATE *> attachment_views) {
576 auto *proxy = new AccessContext(context);
577 proxy->UpdateAttachmentResolveAccess(rp_state, render_area, attachment_views, subpass, kCurrentCommandTag);
John Zulaufaff20662020-06-01 14:07:58 -0600578 proxy->UpdateAttachmentStoreAccess(rp_state, render_area, attachment_views, subpass, kCurrentCommandTag);
John Zulauf7635de32020-05-29 17:14:15 -0600579 return proxy;
580}
581
John Zulauf540266b2020-04-06 18:54:53 -0600582void AccessContext::ResolvePreviousAccess(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range_arg,
John Zulauf355e49b2020-04-24 15:11:15 -0600583 AddressType address_type, ResourceAccessRangeMap *descent_map,
584 const ResourceAccessState *infill_state) const {
John Zulauf16adfc92020-04-08 10:28:33 -0600585 if (!SimpleBinding(image_state)) return;
586
John Zulauf62f10592020-04-03 12:20:02 -0600587 auto subresource_range = NormalizeSubresourceRange(image_state.createInfo, subresource_range_arg);
locke-lunargae26eac2020-04-16 15:29:05 -0600588 subresource_adapter::ImageRangeGenerator range_gen(*image_state.fragment_encoder.get(), subresource_range, {0, 0, 0},
locke-lunarg5f7d3c62020-04-07 00:10:39 -0600589 image_state.createInfo.extent);
John Zulauf16adfc92020-04-08 10:28:33 -0600590 const auto base_address = ResourceBaseAddress(image_state);
John Zulauf62f10592020-04-03 12:20:02 -0600591 for (; range_gen->non_empty(); ++range_gen) {
John Zulauf16adfc92020-04-08 10:28:33 -0600592 ResolvePreviousAccess(address_type, (*range_gen + base_address), descent_map, infill_state);
John Zulauf62f10592020-04-03 12:20:02 -0600593 }
594}
595
John Zulauf7635de32020-05-29 17:14:15 -0600596// Layout transitions are handled as if they were occurring at the beginning of the next subpass
John Zulauf1507ee42020-05-18 11:33:09 -0600597bool AccessContext::ValidateLayoutTransitions(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
John Zulauf7635de32020-05-29 17:14:15 -0600598 const VkRect2D &render_area, uint32_t subpass,
599 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
600 const char *func_name) const {
John Zulauf355e49b2020-04-24 15:11:15 -0600601 bool skip = false;
John Zulauf7635de32020-05-29 17:14:15 -0600602 // As validation methods are const and precede the record/update phase, for any transitions from the immediately
603 // previous subpass, we have to validate them against a copy of the AccessContext, with resolve operations applied, as
 604 // those effects have not been recorded yet.
605 //
606 // Note: we could be more efficient by tracking whether or not we actually *have* any changes (e.g. attachment resolve)
607 // to apply and only copy then, if this proves a hot spot.
608 std::unique_ptr<AccessContext> proxy_for_prev;
609 TrackBack proxy_track_back;
610
John Zulauf355e49b2020-04-24 15:11:15 -0600611 const auto &transitions = rp_state.subpass_transitions[subpass];
612 for (const auto &transition : transitions) {
John Zulauf7635de32020-05-29 17:14:15 -0600613 const bool prev_needs_proxy = transition.prev_pass != VK_SUBPASS_EXTERNAL && (transition.prev_pass + 1 == subpass);
614
615 const auto *track_back = GetTrackBackFromSubpass(transition.prev_pass);
616 if (prev_needs_proxy) {
617 if (!proxy_for_prev) {
618 proxy_for_prev.reset(CreateStoreResolveProxyContext(*track_back->context, rp_state, transition.prev_pass,
619 render_area, attachment_views));
620 proxy_track_back = *track_back;
621 proxy_track_back.context = proxy_for_prev.get();
622 }
623 track_back = &proxy_track_back;
624 }
625 auto hazard = DetectSubpassTransitionHazard(*track_back, attachment_views[transition.attachment]);
John Zulauf355e49b2020-04-24 15:11:15 -0600626 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -0600627 skip |= sync_state.LogError(
628 rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
629 "%s: Hazard %s in subpass %" PRIu32 " for attachment %" PRIu32 " image layout transition. Prior access %s.",
630 func_name, string_SyncHazard(hazard.hazard), subpass, transition.attachment, string_UsageTag(hazard.tag).c_str());
John Zulauf355e49b2020-04-24 15:11:15 -0600631 }
632 }
633 return skip;
634}
635
John Zulauf1507ee42020-05-18 11:33:09 -0600636bool AccessContext::ValidateLoadOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
John Zulauf7635de32020-05-29 17:14:15 -0600637 const VkRect2D &render_area, uint32_t subpass,
638 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
639 const char *func_name) const {
John Zulauf1507ee42020-05-18 11:33:09 -0600640 bool skip = false;
641 const auto *attachment_ci = rp_state.createInfo.pAttachments;
642 VkExtent3D extent = CastTo3D(render_area.extent);
643 VkOffset3D offset = CastTo3D(render_area.offset);
644 const auto external_access_scope = src_external_.barrier.dst_access_scope;
John Zulauf1507ee42020-05-18 11:33:09 -0600645
646 for (uint32_t i = 0; i < rp_state.createInfo.attachmentCount; i++) {
647 if (subpass == rp_state.attachment_first_subpass[i]) {
648 if (attachment_views[i] == nullptr) continue;
649 const IMAGE_VIEW_STATE &view = *attachment_views[i];
650 const IMAGE_STATE *image = view.image_state.get();
651 if (image == nullptr) continue;
652 const auto &ci = attachment_ci[i];
653 const bool is_transition = rp_state.attachment_first_is_transition[i];
654
 655 // Need to check in the following way:
 656 // 1) if the usage bit isn't in the dest_access_scope, and there is a layout transition for initial use, report hazard
657 // vs. transition
658 // 2) if there isn't a layout transition, we need to look at the external context with a "detect hazard" operation
659 // for each aspect loaded.
660
661 const bool has_depth = FormatHasDepth(ci.format);
John Zulaufb027cdb2020-05-21 14:25:22 -0600662 const bool has_stencil = FormatHasStencil(ci.format);
John Zulauf1507ee42020-05-18 11:33:09 -0600663 const bool is_color = !(has_depth || has_stencil);
664
665 const SyncStageAccessIndex load_index = has_depth ? DepthStencilLoadUsage(ci.loadOp) : ColorLoadUsage(ci.loadOp);
666 const SyncStageAccessFlags load_mask = (has_depth || is_color) ? SyncStageAccess::Flags(load_index) : 0U;
667 const SyncStageAccessIndex stencil_load_index = has_stencil ? DepthStencilLoadUsage(ci.stencilLoadOp) : load_index;
668 const SyncStageAccessFlags stencil_mask = has_stencil ? SyncStageAccess::Flags(stencil_load_index) : 0U;
669
John Zulaufaff20662020-06-01 14:07:58 -0600670 HazardResult hazard;
John Zulauf1507ee42020-05-18 11:33:09 -0600671 const char *aspect = nullptr;
672 if (is_transition) {
 673 // For transitions, check the load accesses against the external (transition) barrier's destination access scope
674 SyncHazard transition_hazard = SyncHazard::NONE;
675 bool checked_stencil = false;
676 if (load_mask) {
677 if ((load_mask & external_access_scope) != load_mask) {
678 transition_hazard =
679 SyncStageAccess::HasWrite(load_mask) ? SyncHazard::WRITE_AFTER_WRITE : SyncHazard::READ_AFTER_WRITE;
680 aspect = is_color ? "color" : "depth";
681 }
682 if (!transition_hazard && stencil_mask) {
683 if ((stencil_mask & external_access_scope) != stencil_mask) {
684 transition_hazard = SyncStageAccess::HasWrite(stencil_mask) ? SyncHazard::WRITE_AFTER_WRITE
685 : SyncHazard::READ_AFTER_WRITE;
686 aspect = "stencil";
687 checked_stencil = true;
688 }
689 }
690 }
691 if (transition_hazard) {
692 // Hazard vs. ILT
693 auto load_op_string = string_VkAttachmentLoadOp(checked_stencil ? ci.stencilLoadOp : ci.loadOp);
694 skip |=
 695 sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(transition_hazard),
696 "%s: Hazard %s vs. layout transition in subpass %" PRIu32 " for attachment %" PRIu32
697 " aspect %s during load with loadOp %s.",
698 func_name, string_SyncHazard(transition_hazard), subpass, i, aspect, load_op_string);
699 }
700 } else {
701 auto hazard_range = view.normalized_subresource_range;
702 bool checked_stencil = false;
703 if (is_color) {
704 hazard = DetectHazard(*image, load_index, view.normalized_subresource_range, offset, extent);
705 aspect = "color";
706 } else {
707 if (has_depth) {
708 hazard_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
709 hazard = DetectHazard(*image, load_index, hazard_range, offset, extent);
710 aspect = "depth";
711 }
712 if (!hazard.hazard && has_stencil) {
713 hazard_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
714 hazard = DetectHazard(*image, stencil_load_index, hazard_range, offset, extent);
715 aspect = "stencil";
716 checked_stencil = true;
717 }
718 }
719
720 if (hazard.hazard) {
721 auto load_op_string = string_VkAttachmentLoadOp(checked_stencil ? ci.stencilLoadOp : ci.loadOp);
722 skip |= sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
723 "%s: Hazard %s in subpass %" PRIu32 " for attachment %" PRIu32
locke-lunarg88dbb542020-06-23 22:05:42 -0600724 " aspect %s during load with loadOp %s. Prior access %s.",
725 func_name, string_SyncHazard(hazard.hazard), subpass, i, aspect, load_op_string,
726 string_UsageTag(hazard.tag).c_str());
John Zulauf1507ee42020-05-18 11:33:09 -0600727 }
728 }
729 }
730 }
731 return skip;
732}
733
John Zulaufaff20662020-06-01 14:07:58 -0600734// Store operation validation can ignore resolve (before it) and layout transitions after it. The first is ignored
 735// because of the ordering guarantees w.r.t. sample access, and because the resolve validation hasn't altered the state, as
 736// store is part of the same Next/End operation.
 737// The latter is handled in layout transition validation directly.
738bool AccessContext::ValidateStoreOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
739 const VkRect2D &render_area, uint32_t subpass,
740 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
741 const char *func_name) const {
742 bool skip = false;
743 const auto *attachment_ci = rp_state.createInfo.pAttachments;
744 VkExtent3D extent = CastTo3D(render_area.extent);
745 VkOffset3D offset = CastTo3D(render_area.offset);
746
747 for (uint32_t i = 0; i < rp_state.createInfo.attachmentCount; i++) {
748 if (subpass == rp_state.attachment_last_subpass[i]) {
749 if (attachment_views[i] == nullptr) continue;
750 const IMAGE_VIEW_STATE &view = *attachment_views[i];
751 const IMAGE_STATE *image = view.image_state.get();
752 if (image == nullptr) continue;
753 const auto &ci = attachment_ci[i];
754
755 // The spec states that "don't care" is an operation with VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
756 // so we assume that an implementation is *free* to write in that case, meaning that for correctness
757 // sake, we treat DONT_CARE as writing.
758 const bool has_depth = FormatHasDepth(ci.format);
759 const bool has_stencil = FormatHasStencil(ci.format);
760 const bool is_color = !(has_depth || has_stencil);
761 const bool store_op_stores = ci.storeOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
762 if (!has_stencil && !store_op_stores) continue;
763
764 HazardResult hazard;
765 const char *aspect = nullptr;
766 bool checked_stencil = false;
767 if (is_color) {
768 hazard = DetectHazard(*image, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE,
769 view.normalized_subresource_range, kAttachmentRasterOrder, offset, extent);
770 aspect = "color";
771 } else {
772 const bool stencil_op_stores = ci.stencilStoreOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
773 auto hazard_range = view.normalized_subresource_range;
774 if (has_depth && store_op_stores) {
775 hazard_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
776 hazard = DetectHazard(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, hazard_range,
777 kAttachmentRasterOrder, offset, extent);
778 aspect = "depth";
779 }
780 if (!hazard.hazard && has_stencil && stencil_op_stores) {
781 hazard_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
782 hazard = DetectHazard(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, hazard_range,
783 kAttachmentRasterOrder, offset, extent);
784 aspect = "stencil";
785 checked_stencil = true;
786 }
787 }
788
789 if (hazard.hazard) {
790 const char *const op_type_string = checked_stencil ? "stencilStoreOp" : "storeOp";
791 const char *const store_op_string = string_VkAttachmentStoreOp(checked_stencil ? ci.stencilStoreOp : ci.storeOp);
John Zulauf1dae9192020-06-16 15:46:44 -0600792 skip |= sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
793 "%s: Hazard %s in subpass %" PRIu32 " for attachment %" PRIu32
794 " %s aspect during store with %s %s. Prior access %s",
795 func_name, string_SyncHazard(hazard.hazard), subpass, i, aspect, op_type_string,
796 store_op_string, string_UsageTag(hazard.tag).c_str());
John Zulaufaff20662020-06-01 14:07:58 -0600797 }
798 }
799 }
800 return skip;
801}
802
John Zulaufb027cdb2020-05-21 14:25:22 -0600803bool AccessContext::ValidateResolveOperations(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
804 const VkRect2D &render_area,
805 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
806 uint32_t subpass) const {
John Zulauf7635de32020-05-29 17:14:15 -0600807 ValidateResolveAction validate_action(rp_state.renderPass, subpass, *this, sync_state, func_name);
808 ResolveOperation(validate_action, rp_state, render_area, attachment_views, subpass);
809 return validate_action.GetSkip();
John Zulaufb027cdb2020-05-21 14:25:22 -0600810}
811
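// Detector for plain (unordered) hazard checks of a single usage index against existing access state, used with the
// generic range walkers above.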
John Zulauf3d84f1b2020-03-09 13:33:25 -0600812class HazardDetector {
813 SyncStageAccessIndex usage_index_;
814
815 public:
John Zulauf5f13a792020-03-10 07:31:21 -0600816 HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const { return pos->second.DetectHazard(usage_index_); }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600817 HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos) const {
818 return pos->second.DetectAsyncHazard(usage_index_);
819 }
820 HazardDetector(SyncStageAccessIndex usage) : usage_index_(usage) {}
821};
822
John Zulauf69133422020-05-20 14:55:53 -0600823class HazardDetectorWithOrdering {
824 const SyncStageAccessIndex usage_index_;
825 const SyncOrderingBarrier &ordering_;
826
827 public:
828 HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const {
829 return pos->second.DetectHazard(usage_index_, ordering_);
830 }
831 HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos) const {
832 return pos->second.DetectAsyncHazard(usage_index_);
833 }
834 HazardDetectorWithOrdering(SyncStageAccessIndex usage, const SyncOrderingBarrier &ordering)
835 : usage_index_(usage), ordering_(ordering) {}
836};
837
John Zulauf16adfc92020-04-08 10:28:33 -0600838HazardResult AccessContext::DetectHazard(AddressType type, SyncStageAccessIndex usage_index,
John Zulauf540266b2020-04-06 18:54:53 -0600839 const ResourceAccessRange &range) const {
John Zulauf3d84f1b2020-03-09 13:33:25 -0600840 HazardDetector detector(usage_index);
John Zulauf355e49b2020-04-24 15:11:15 -0600841 return DetectHazard(type, detector, range, DetectOptions::kDetectAll);
John Zulauf3d84f1b2020-03-09 13:33:25 -0600842}
843
John Zulauf16adfc92020-04-08 10:28:33 -0600844HazardResult AccessContext::DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index,
John Zulauf355e49b2020-04-24 15:11:15 -0600845 const ResourceAccessRange &range) const {
John Zulauf16adfc92020-04-08 10:28:33 -0600846 if (!SimpleBinding(buffer)) return HazardResult();
John Zulauf355e49b2020-04-24 15:11:15 -0600847 return DetectHazard(AddressType::kLinearAddress, usage_index, range + ResourceBaseAddress(buffer));
John Zulaufe5da6e52020-03-18 15:32:18 -0600848}
849
John Zulauf69133422020-05-20 14:55:53 -0600850template <typename Detector>
851HazardResult AccessContext::DetectHazard(Detector &detector, const IMAGE_STATE &image,
852 const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
853 const VkExtent3D &extent, DetectOptions options) const {
854 if (!SimpleBinding(image)) return HazardResult();
855 subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, offset, extent);
856 const auto address_type = ImageAddressType(image);
857 const auto base_address = ResourceBaseAddress(image);
858 for (; range_gen->non_empty(); ++range_gen) {
859 HazardResult hazard = DetectHazard(address_type, detector, (*range_gen + base_address), options);
860 if (hazard.hazard) return hazard;
861 }
862 return HazardResult();
863}
864
John Zulauf540266b2020-04-06 18:54:53 -0600865HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
866 const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
867 const VkExtent3D &extent) const {
John Zulauf5c5e88d2019-12-26 11:22:02 -0700868 VkImageSubresourceRange subresource_range = {subresource.aspectMask, subresource.mipLevel, 1, subresource.baseArrayLayer,
869 subresource.layerCount};
John Zulauf1507ee42020-05-18 11:33:09 -0600870 return DetectHazard(image, current_usage, subresource_range, offset, extent);
871}
872
873HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
874 const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
875 const VkExtent3D &extent) const {
John Zulauf69133422020-05-20 14:55:53 -0600876 HazardDetector detector(current_usage);
877 return DetectHazard(detector, image, subresource_range, offset, extent, DetectOptions::kDetectAll);
878}
879
880HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
881 const VkImageSubresourceRange &subresource_range, const SyncOrderingBarrier &ordering,
882 const VkOffset3D &offset, const VkExtent3D &extent) const {
883 HazardDetectorWithOrdering detector(current_usage, ordering);
884 return DetectHazard(detector, image, subresource_range, offset, extent, DetectOptions::kDetectAll);
John Zulauf9cb530d2019-09-30 14:14:10 -0600885}
886
John Zulaufb027cdb2020-05-21 14:25:22 -0600887// Some common code for looking at attachments. If anything is wrong we return no hazard, as core validation
 888// should have reported the issue regarding an invalid attachment entry
889HazardResult AccessContext::DetectHazard(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage,
890 const SyncOrderingBarrier &ordering, const VkOffset3D &offset, const VkExtent3D &extent,
891 VkImageAspectFlags aspect_mask) const {
892 if (view != nullptr) {
893 const IMAGE_STATE *image = view->image_state.get();
894 if (image != nullptr) {
895 auto *detect_range = &view->normalized_subresource_range;
896 VkImageSubresourceRange masked_range;
897 if (aspect_mask) { // If present and non-zero, restrict the normalized range to aspects present in aspect_mask
898 masked_range = view->normalized_subresource_range;
899 masked_range.aspectMask = aspect_mask & masked_range.aspectMask;
900 detect_range = &masked_range;
901 }
902
903 // NOTE: The range encoding code is not robust to invalid ranges, so we protect it from our change
904 if (detect_range->aspectMask) {
905 return DetectHazard(*image, current_usage, *detect_range, ordering, offset, extent);
906 }
907 }
908 }
909 return HazardResult();
910}
John Zulauf3d84f1b2020-03-09 13:33:25 -0600911class BarrierHazardDetector {
912 public:
913 BarrierHazardDetector(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
914 SyncStageAccessFlags src_access_scope)
915 : usage_index_(usage_index), src_exec_scope_(src_exec_scope), src_access_scope_(src_access_scope) {}
916
John Zulauf5f13a792020-03-10 07:31:21 -0600917 HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const {
918 return pos->second.DetectBarrierHazard(usage_index_, src_exec_scope_, src_access_scope_);
John Zulauf0cb5be22020-01-23 12:18:22 -0700919 }
John Zulauf3d84f1b2020-03-09 13:33:25 -0600920 HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos) const {
921 // Async barrier hazard detection can use the same path as the usage index is not IsRead, but is IsWrite
922 return pos->second.DetectAsyncHazard(usage_index_);
923 }
924
925 private:
926 SyncStageAccessIndex usage_index_;
927 VkPipelineStageFlags src_exec_scope_;
928 SyncStageAccessFlags src_access_scope_;
929};
930
John Zulauf16adfc92020-04-08 10:28:33 -0600931HazardResult AccessContext::DetectBarrierHazard(AddressType type, SyncStageAccessIndex current_usage,
John Zulauf540266b2020-04-06 18:54:53 -0600932 VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
John Zulauf355e49b2020-04-24 15:11:15 -0600933 const ResourceAccessRange &range, DetectOptions options) const {
John Zulauf3d84f1b2020-03-09 13:33:25 -0600934 BarrierHazardDetector detector(current_usage, src_exec_scope, src_access_scope);
John Zulauf69133422020-05-20 14:55:53 -0600935 return DetectHazard(type, detector, range, options);
John Zulauf0cb5be22020-01-23 12:18:22 -0700936}
937
John Zulauf16adfc92020-04-08 10:28:33 -0600938HazardResult AccessContext::DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
John Zulauf355e49b2020-04-24 15:11:15 -0600939 SyncStageAccessFlags src_access_scope,
940 const VkImageSubresourceRange &subresource_range,
941 DetectOptions options) const {
John Zulauf69133422020-05-20 14:55:53 -0600942 BarrierHazardDetector detector(SyncStageAccessIndex::SYNC_IMAGE_LAYOUT_TRANSITION, src_exec_scope, src_access_scope);
943 VkOffset3D zero_offset = {0, 0, 0};
944 return DetectHazard(detector, image, subresource_range, zero_offset, image.createInfo.extent, options);
John Zulauf0cb5be22020-01-23 12:18:22 -0700945}
946
John Zulauf355e49b2020-04-24 15:11:15 -0600947HazardResult AccessContext::DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
948 SyncStageAccessFlags src_stage_accesses,
949 const VkImageMemoryBarrier &barrier) const {
950 auto subresource_range = NormalizeSubresourceRange(image.createInfo, barrier.subresourceRange);
951 const auto src_access_scope = SyncStageAccess::AccessScope(src_stage_accesses, barrier.srcAccessMask);
952 return DetectImageBarrierHazard(image, src_exec_scope, src_access_scope, subresource_range, kDetectAll);
953}
954
John Zulauf9cb530d2019-09-30 14:14:10 -0600955template <typename Flags, typename Map>
956SyncStageAccessFlags AccessScopeImpl(Flags flag_mask, const Map &map) {
957 SyncStageAccessFlags scope = 0;
958 for (const auto &bit_scope : map) {
959 if (flag_mask < bit_scope.first) break;
960
961 if (flag_mask & bit_scope.first) {
962 scope |= bit_scope.second;
963 }
964 }
965 return scope;
966}
967
968SyncStageAccessFlags SyncStageAccess::AccessScopeByStage(VkPipelineStageFlags stages) {
969 return AccessScopeImpl(stages, syncStageAccessMaskByStageBit);
970}
971
972SyncStageAccessFlags SyncStageAccess::AccessScopeByAccess(VkAccessFlags accesses) {
973 return AccessScopeImpl(accesses, syncStageAccessMaskByAccessBit);
974}
975
 976// Getting from stage mask and access mask to stage/access masks is something we need to be good at...
977SyncStageAccessFlags SyncStageAccess::AccessScope(VkPipelineStageFlags stages, VkAccessFlags accesses) {
John Zulauf5f13a792020-03-10 07:31:21 -0600978 // The access scope is the intersection of all stage/access types possible for the enabled stages and the enabled
 979 // accesses (after factoring out common terms, the union of the stage/access intersections is the intersection
 980 // of the union of all stage/access types for all the stages and the same union for the access mask)...
John Zulauf9cb530d2019-09-30 14:14:10 -0600981 return AccessScopeByStage(stages) & AccessScopeByAccess(accesses);
982}
983
984template <typename Action>
John Zulauf5c5e88d2019-12-26 11:22:02 -0700985void UpdateMemoryAccessState(ResourceAccessRangeMap *accesses, const ResourceAccessRange &range, const Action &action) {
John Zulauf7635de32020-05-29 17:14:15 -0600986 // TODO: Optimization for operations that do a pure overwrite (i.e. WRITE usages which rewrite the state, vs READ usages
 987 // that do incremental updates)
John Zulauf9cb530d2019-09-30 14:14:10 -0600988 auto pos = accesses->lower_bound(range);
989 if (pos == accesses->end() || !pos->first.intersects(range)) {
990 // The range is empty, fill it with a default value.
991 pos = action.Infill(accesses, pos, range);
992 } else if (range.begin < pos->first.begin) {
993 // Leading empty space, infill
John Zulauf5c5e88d2019-12-26 11:22:02 -0700994 pos = action.Infill(accesses, pos, ResourceAccessRange(range.begin, pos->first.begin));
John Zulauf9cb530d2019-09-30 14:14:10 -0600995 } else if (pos->first.begin < range.begin) {
996 // Trim the beginning if needed
997 pos = accesses->split(pos, range.begin, sparse_container::split_op_keep_both());
998 ++pos;
999 }
1000
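    // Walk all entries overlapping the range, splitting the last entry at range.end and infilling any interior gaps
    // between entries.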
1001 const auto the_end = accesses->end();
1002 while ((pos != the_end) && pos->first.intersects(range)) {
1003 if (pos->first.end > range.end) {
1004 pos = accesses->split(pos, range.end, sparse_container::split_op_keep_both());
1005 }
1006
1007 pos = action(accesses, pos);
1008 if (pos == the_end) break;
1009
1010 auto next = pos;
1011 ++next;
1012 if ((pos->first.end < range.end) && (next != the_end) && !next->first.is_subsequent_to(pos->first)) {
1013 // Need to infill if next is disjoint
1014 VkDeviceSize limit = (next == the_end) ? range.end : std::min(range.end, next->first.begin);
John Zulauf5c5e88d2019-12-26 11:22:02 -07001015 ResourceAccessRange new_range(pos->first.end, limit);
John Zulauf9cb530d2019-09-30 14:14:10 -06001016 next = action.Infill(accesses, next, new_range);
1017 }
1018 pos = next;
1019 }
1020}
1021
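// Update functor: Infill resolves the previous access state for gaps, and operator() records the new usage and tag on
// each entry.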
1022struct UpdateMemoryAccessStateFunctor {
John Zulauf5c5e88d2019-12-26 11:22:02 -07001023 using Iterator = ResourceAccessRangeMap::iterator;
1024 Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const {
John Zulauf5f13a792020-03-10 07:31:21 -06001025 // this is only called on gaps, and never returns a gap.
1026 ResourceAccessState default_state;
John Zulauf16adfc92020-04-08 10:28:33 -06001027 context.ResolvePreviousAccess(type, range, accesses, &default_state);
John Zulauf5f13a792020-03-10 07:31:21 -06001028 return accesses->lower_bound(range);
John Zulauf9cb530d2019-09-30 14:14:10 -06001029 }
John Zulauf5f13a792020-03-10 07:31:21 -06001030
John Zulauf5c5e88d2019-12-26 11:22:02 -07001031 Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
John Zulauf9cb530d2019-09-30 14:14:10 -06001032 auto &access_state = pos->second;
1033 access_state.Update(usage, tag);
1034 return pos;
1035 }
1036
John Zulauf16adfc92020-04-08 10:28:33 -06001037 UpdateMemoryAccessStateFunctor(AccessContext::AddressType type_, const AccessContext &context_, SyncStageAccessIndex usage_,
John Zulauf540266b2020-04-06 18:54:53 -06001038 const ResourceUsageTag &tag_)
John Zulauf16adfc92020-04-08 10:28:33 -06001039 : type(type_), context(context_), usage(usage_), tag(tag_) {}
1040 const AccessContext::AddressType type;
John Zulauf540266b2020-04-06 18:54:53 -06001041 const AccessContext &context;
John Zulauf16adfc92020-04-08 10:28:33 -06001042 const SyncStageAccessIndex usage;
John Zulauf9cb530d2019-09-30 14:14:10 -06001043 const ResourceUsageTag &tag;
1044};
1045
1046struct ApplyMemoryAccessBarrierFunctor {
John Zulauf5c5e88d2019-12-26 11:22:02 -07001047 using Iterator = ResourceAccessRangeMap::iterator;
1048 inline Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const { return pos; }
John Zulauf9cb530d2019-09-30 14:14:10 -06001049
John Zulauf5c5e88d2019-12-26 11:22:02 -07001050 Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
John Zulauf9cb530d2019-09-30 14:14:10 -06001051 auto &access_state = pos->second;
John Zulauf36bcf6a2020-02-03 15:12:52 -07001052 access_state.ApplyMemoryAccessBarrier(src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope);
John Zulauf9cb530d2019-09-30 14:14:10 -06001053 return pos;
1054 }
1055
John Zulauf36bcf6a2020-02-03 15:12:52 -07001056 ApplyMemoryAccessBarrierFunctor(VkPipelineStageFlags src_exec_scope_, SyncStageAccessFlags src_access_scope_,
1057 VkPipelineStageFlags dst_exec_scope_, SyncStageAccessFlags dst_access_scope_)
1058 : src_exec_scope(src_exec_scope_),
1059 src_access_scope(src_access_scope_),
1060 dst_exec_scope(dst_exec_scope_),
1061 dst_access_scope(dst_access_scope_) {}
John Zulauf9cb530d2019-09-30 14:14:10 -06001062
John Zulauf36bcf6a2020-02-03 15:12:52 -07001063 VkPipelineStageFlags src_exec_scope;
1064 SyncStageAccessFlags src_access_scope;
1065 VkPipelineStageFlags dst_exec_scope;
1066 SyncStageAccessFlags dst_access_scope;
John Zulauf9cb530d2019-09-30 14:14:10 -06001067};
1068
1069struct ApplyGlobalBarrierFunctor {
John Zulauf5c5e88d2019-12-26 11:22:02 -07001070 using Iterator = ResourceAccessRangeMap::iterator;
1071 inline Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const { return pos; }
John Zulauf9cb530d2019-09-30 14:14:10 -06001072
John Zulauf5c5e88d2019-12-26 11:22:02 -07001073 Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
John Zulauf9cb530d2019-09-30 14:14:10 -06001074 auto &access_state = pos->second;
John Zulauf36bcf6a2020-02-03 15:12:52 -07001075 access_state.ApplyExecutionBarrier(src_exec_scope, dst_exec_scope);
John Zulauf9cb530d2019-09-30 14:14:10 -06001076
1077 for (const auto &functor : barrier_functor) {
1078 functor(accesses, pos);
1079 }
1080 return pos;
1081 }
1082
John Zulauf36bcf6a2020-02-03 15:12:52 -07001083 ApplyGlobalBarrierFunctor(VkPipelineStageFlags src_exec_scope, VkPipelineStageFlags dst_exec_scope,
1084 SyncStageAccessFlags src_stage_accesses, SyncStageAccessFlags dst_stage_accesses,
John Zulauf9cb530d2019-09-30 14:14:10 -06001085 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers)
John Zulauf36bcf6a2020-02-03 15:12:52 -07001086 : src_exec_scope(src_exec_scope), dst_exec_scope(dst_exec_scope) {
John Zulauf9cb530d2019-09-30 14:14:10 -06001087 // Don't want to create this per tracked item, but don't want to loop through all tracked items per barrier...
1088 barrier_functor.reserve(memoryBarrierCount);
1089 for (uint32_t barrier_index = 0; barrier_index < memoryBarrierCount; barrier_index++) {
1090 const auto &barrier = pMemoryBarriers[barrier_index];
John Zulauf36bcf6a2020-02-03 15:12:52 -07001091 barrier_functor.emplace_back(src_exec_scope, SyncStageAccess::AccessScope(src_stage_accesses, barrier.srcAccessMask),
1092 dst_exec_scope, SyncStageAccess::AccessScope(dst_stage_accesses, barrier.dstAccessMask));
John Zulauf9cb530d2019-09-30 14:14:10 -06001093 }
1094 }
1095
John Zulauf36bcf6a2020-02-03 15:12:52 -07001096 const VkPipelineStageFlags src_exec_scope;
1097 const VkPipelineStageFlags dst_exec_scope;
John Zulauf9cb530d2019-09-30 14:14:10 -06001098 std::vector<ApplyMemoryAccessBarrierFunctor> barrier_functor;
1099};
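// Minimal usage sketch (hypothetical caller, assuming the execution scopes and stage-access masks
// have already been expanded by the pipeline-barrier record path): each VkMemoryBarrier is folded
// into one ApplyMemoryAccessBarrierFunctor inside the global functor, with its access masks
// narrowed to the stages actually present in the corresponding execution scope.
//
//   VkPipelineStageFlags src_exec_scope = VK_PIPELINE_STAGE_TRANSFER_BIT;
//   VkPipelineStageFlags dst_exec_scope = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
//   SyncStageAccessFlags src_stage_accesses = SyncStageAccess::AccessScopeByStage(src_exec_scope);
//   SyncStageAccessFlags dst_stage_accesses = SyncStageAccess::AccessScopeByStage(dst_exec_scope);
//   VkMemoryBarrier mem_barrier = {VK_STRUCTURE_TYPE_MEMORY_BARRIER, nullptr,
//                                  VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT};
//   ApplyGlobalBarrierFunctor barrier_action(src_exec_scope, dst_exec_scope, src_stage_accesses,
//                                            dst_stage_accesses, 1, &mem_barrier);
//   access_context.ApplyGlobalBarriers(barrier_action);  // one pass over every tracked address type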
1100
John Zulauf355e49b2020-04-24 15:11:15 -06001101void AccessContext::UpdateAccessState(AddressType type, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
1102 const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001103 UpdateMemoryAccessStateFunctor action(type, *this, current_usage, tag);
1104 UpdateMemoryAccessState(&GetAccessStateMap(type), range, action);
John Zulauf3d84f1b2020-03-09 13:33:25 -06001105}
1106
John Zulauf16adfc92020-04-08 10:28:33 -06001107void AccessContext::UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage,
John Zulauf355e49b2020-04-24 15:11:15 -06001108 const ResourceAccessRange &range, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001109 if (!SimpleBinding(buffer)) return;
1110 const auto base_address = ResourceBaseAddress(buffer);
1111 UpdateAccessState(AddressType::kLinearAddress, current_usage, range + base_address, tag);
1112}
John Zulauf355e49b2020-04-24 15:11:15 -06001113
John Zulauf540266b2020-04-06 18:54:53 -06001114void AccessContext::UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
John Zulauf355e49b2020-04-24 15:11:15 -06001115 const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
John Zulauf540266b2020-04-06 18:54:53 -06001116 const VkExtent3D &extent, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001117 if (!SimpleBinding(image)) return;
locke-lunargae26eac2020-04-16 15:29:05 -06001118 subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, offset, extent);
John Zulauf16adfc92020-04-08 10:28:33 -06001119 const auto address_type = ImageAddressType(image);
1120 const auto base_address = ResourceBaseAddress(image);
1121 UpdateMemoryAccessStateFunctor action(address_type, *this, current_usage, tag);
John Zulauf5f13a792020-03-10 07:31:21 -06001122 for (; range_gen->non_empty(); ++range_gen) {
John Zulauf16adfc92020-04-08 10:28:33 -06001123 UpdateMemoryAccessState(&GetAccessStateMap(address_type), (*range_gen + base_address), action);
John Zulauf5f13a792020-03-10 07:31:21 -06001124 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06001125}
John Zulauf7635de32020-05-29 17:14:15 -06001126void AccessContext::UpdateAccessState(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const VkOffset3D &offset,
1127 const VkExtent3D &extent, VkImageAspectFlags aspect_mask, const ResourceUsageTag &tag) {
1128 if (view != nullptr) {
1129 const IMAGE_STATE *image = view->image_state.get();
1130 if (image != nullptr) {
1131 auto *update_range = &view->normalized_subresource_range;
1132 VkImageSubresourceRange masked_range;
1133 if (aspect_mask) { // If present and non-zero, restrict the normalized range to aspects present in aspect_mask
1134 masked_range = view->normalized_subresource_range;
1135 masked_range.aspectMask = aspect_mask & masked_range.aspectMask;
1136 update_range = &masked_range;
1137 }
1138 UpdateAccessState(*image, current_usage, *update_range, offset, extent, tag);
1139 }
1140 }
1141}
John Zulauf3d84f1b2020-03-09 13:33:25 -06001142
John Zulauf355e49b2020-04-24 15:11:15 -06001143void AccessContext::UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
1144 const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
1145 const VkExtent3D &extent, const ResourceUsageTag &tag) {
John Zulauf355e49b2020-04-24 15:11:15 -06001146 VkImageSubresourceRange subresource_range = {subresource.aspectMask, subresource.mipLevel, 1, subresource.baseArrayLayer,
1147 subresource.layerCount};
1148 UpdateAccessState(image, current_usage, subresource_range, offset, extent, tag);
1149}
1150
John Zulauf540266b2020-04-06 18:54:53 -06001151template <typename Action>
1152void AccessContext::UpdateMemoryAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action) {
John Zulauf16adfc92020-04-08 10:28:33 -06001153 if (!SimpleBinding(buffer)) return;
1154 const auto base_address = ResourceBaseAddress(buffer);
1155 UpdateMemoryAccessState(&GetAccessStateMap(AddressType::kLinearAddress), (range + base_address), action);
John Zulauf540266b2020-04-06 18:54:53 -06001156}
1157
1158template <typename Action>
1159void AccessContext::UpdateMemoryAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
1160 const Action action) {
John Zulauf16adfc92020-04-08 10:28:33 -06001161 if (!SimpleBinding(image)) return;
1162 const auto address_type = ImageAddressType(image);
1163 auto *accesses = &GetAccessStateMap(address_type);
John Zulauf540266b2020-04-06 18:54:53 -06001164
locke-lunargae26eac2020-04-16 15:29:05 -06001165 subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, {0, 0, 0},
locke-lunarg5f7d3c62020-04-07 00:10:39 -06001166 image.createInfo.extent);
John Zulauf540266b2020-04-06 18:54:53 -06001167
John Zulauf16adfc92020-04-08 10:28:33 -06001168 const auto base_address = ResourceBaseAddress(image);
John Zulauf540266b2020-04-06 18:54:53 -06001169 for (; range_gen->non_empty(); ++range_gen) {
John Zulauf16adfc92020-04-08 10:28:33 -06001170 UpdateMemoryAccessState(accesses, (*range_gen + base_address), action);
John Zulauf540266b2020-04-06 18:54:53 -06001171 }
1172}
1173
John Zulauf7635de32020-05-29 17:14:15 -06001174void AccessContext::UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
1175 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
1176 const ResourceUsageTag &tag) {
1177 UpdateStateResolveAction update(*this, tag);
1178 ResolveOperation(update, rp_state, render_area, attachment_views, subpass);
1179}
1180
John Zulaufaff20662020-06-01 14:07:58 -06001181void AccessContext::UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
1182 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
1183 const ResourceUsageTag &tag) {
1184 const auto *attachment_ci = rp_state.createInfo.pAttachments;
1185 VkExtent3D extent = CastTo3D(render_area.extent);
1186 VkOffset3D offset = CastTo3D(render_area.offset);
1187
1188 for (uint32_t i = 0; i < rp_state.createInfo.attachmentCount; i++) {
1189 if (rp_state.attachment_last_subpass[i] == subpass) {
1190 if (attachment_views[i] == nullptr) continue; // UNUSED
1191 const auto &view = *attachment_views[i];
1192 const IMAGE_STATE *image = view.image_state.get();
1193 if (image == nullptr) continue;
1194
1195 const auto &ci = attachment_ci[i];
1196 const bool has_depth = FormatHasDepth(ci.format);
1197 const bool has_stencil = FormatHasStencil(ci.format);
1198 const bool is_color = !(has_depth || has_stencil);
1199 const bool store_op_stores = ci.storeOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
1200
1201 if (is_color && store_op_stores) {
1202 UpdateAccessState(*image, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, view.normalized_subresource_range,
1203 offset, extent, tag);
1204 } else {
1205 auto update_range = view.normalized_subresource_range;
1206 if (has_depth && store_op_stores) {
1207 update_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
1208 UpdateAccessState(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, update_range, offset, extent,
1209 tag);
1210 }
1211 const bool stencil_op_stores = ci.stencilStoreOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
1212 if (has_stencil && stencil_op_stores) {
1213 update_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
1214 UpdateAccessState(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, update_range, offset, extent,
1215 tag);
1216 }
1217 }
1218 }
1219 }
1220}
1221
John Zulauf540266b2020-04-06 18:54:53 -06001222template <typename Action>
1223void AccessContext::ApplyGlobalBarriers(const Action &barrier_action) {
1224 // Note: Barriers do *not* cross context boundaries, applying to accessess within.... (at least for renderpass subpasses)
John Zulauf16adfc92020-04-08 10:28:33 -06001225 for (const auto address_type : kAddressTypes) {
1226 UpdateMemoryAccessState(&GetAccessStateMap(address_type), full_range, barrier_action);
John Zulauf540266b2020-04-06 18:54:53 -06001227 }
1228}
1229
1230void AccessContext::ResolveChildContexts(const std::vector<AccessContext> &contexts) {
John Zulauf540266b2020-04-06 18:54:53 -06001231 for (uint32_t subpass_index = 0; subpass_index < contexts.size(); subpass_index++) {
1232 auto &context = contexts[subpass_index];
John Zulauf16adfc92020-04-08 10:28:33 -06001233 for (const auto address_type : kAddressTypes) {
John Zulauf355e49b2020-04-24 15:11:15 -06001234 context.ResolveAccessRange(address_type, full_range, &context.GetDstExternalTrackBack().barrier,
1235 &GetAccessStateMap(address_type), nullptr, false);
John Zulauf540266b2020-04-06 18:54:53 -06001236 }
1237 }
1238}
1239
John Zulauf355e49b2020-04-24 15:11:15 -06001240void AccessContext::ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
1241 SyncStageAccessFlags src_access_scope, VkPipelineStageFlags dst_exec_scope,
1242 SyncStageAccessFlags dst_access_scope, const VkImageSubresourceRange &subresource_range) {
1243 const ApplyMemoryAccessBarrierFunctor barrier_action(src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope);
1244 UpdateMemoryAccess(image, subresource_range, barrier_action);
1245}
1246
John Zulauf7635de32020-05-29 17:14:15 -06001247 // Note: ImageBarriers do not operate at offset/extent resolution, only at the whole-subresource level
John Zulauf355e49b2020-04-24 15:11:15 -06001248void AccessContext::ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
1249 SyncStageAccessFlags src_access_scope, VkPipelineStageFlags dst_exec_scope,
1250 SyncStageAccessFlags dst_access_scope, const VkImageSubresourceRange &subresource_range,
1251 bool layout_transition, const ResourceUsageTag &tag) {
1252 if (layout_transition) {
1253 UpdateAccessState(image, SYNC_IMAGE_LAYOUT_TRANSITION, subresource_range, VkOffset3D{0, 0, 0}, image.createInfo.extent,
1254 tag);
1255 ApplyImageBarrier(image, src_exec_scope, SYNC_IMAGE_LAYOUT_TRANSITION_BIT, dst_exec_scope, dst_access_scope,
1256 subresource_range);
John Zulaufc9201222020-05-13 15:13:03 -06001257 } else {
1258 ApplyImageBarrier(image, src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope, subresource_range);
John Zulauf355e49b2020-04-24 15:11:15 -06001259 }
John Zulauf355e49b2020-04-24 15:11:15 -06001260}
1261
John Zulauf7635de32020-05-29 17:14:15 -06001262 // Note: ImageBarriers do not operate at offset/extent resolution, only at the whole-subresource level
John Zulauf355e49b2020-04-24 15:11:15 -06001263void AccessContext::ApplyImageBarrier(const IMAGE_STATE &image, const SyncBarrier &barrier,
1264 const VkImageSubresourceRange &subresource_range, bool layout_transition,
1265 const ResourceUsageTag &tag) {
1266 ApplyImageBarrier(image, barrier.src_exec_scope, barrier.src_access_scope, barrier.dst_exec_scope, barrier.dst_access_scope,
1267 subresource_range, layout_transition, tag);
1268}
1269
1270// Suitable only for *subpass* access contexts
John Zulauf7635de32020-05-29 17:14:15 -06001271HazardResult AccessContext::DetectSubpassTransitionHazard(const TrackBack &track_back, const IMAGE_VIEW_STATE *attach_view) const {
John Zulauf355e49b2020-04-24 15:11:15 -06001272 if (!attach_view) return HazardResult();
1273 const auto image_state = attach_view->image_state.get();
1274 if (!image_state) return HazardResult();
1275
John Zulauf355e49b2020-04-24 15:11:15 -06001276 // We should never ask for a transition from a context we don't have
John Zulauf7635de32020-05-29 17:14:15 -06001277 assert(track_back.context);
John Zulauf355e49b2020-04-24 15:11:15 -06001278
1279 // Do the detection against the specific prior context independent of other contexts. (Synchronous only)
John Zulauf7635de32020-05-29 17:14:15 -06001280 auto hazard = track_back.context->DetectImageBarrierHazard(*image_state, track_back.barrier.src_exec_scope,
1281 track_back.barrier.src_access_scope,
1282 attach_view->normalized_subresource_range, kDetectPrevious);
John Zulauf355e49b2020-04-24 15:11:15 -06001283 if (!hazard.hazard) {
1284 // The Async hazard check is against the current context's async set.
John Zulauf7635de32020-05-29 17:14:15 -06001285 hazard = DetectImageBarrierHazard(*image_state, track_back.barrier.src_exec_scope, track_back.barrier.src_access_scope,
John Zulauf355e49b2020-04-24 15:11:15 -06001286 attach_view->normalized_subresource_range, kDetectAsync);
1287 }
1288 return hazard;
1289}
1290
1291// Class CommandBufferAccessContext: Keep track of resource access state information for a specific command buffer
1292bool CommandBufferAccessContext::ValidateBeginRenderPass(const RENDER_PASS_STATE &rp_state,
1293
1294 const VkRenderPassBeginInfo *pRenderPassBegin,
1295 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
1296 const char *func_name) const {
1297 // Check if any of the layout transitions are hazardous.... but we don't have the renderpass context to work with, so we
1298 bool skip = false;
1299 uint32_t subpass = 0;
1300 const auto &transitions = rp_state.subpass_transitions[subpass];
1301 if (transitions.size()) {
1302 const std::vector<AccessContext> empty_context_vector;
1303 // Create a context we can use to validate against...
1304 AccessContext temp_context(subpass, queue_flags_, rp_state.subpass_dependencies, empty_context_vector,
1305 const_cast<AccessContext *>(&cb_access_context_));
1306
1307 assert(pRenderPassBegin);
1308 if (nullptr == pRenderPassBegin) return skip;
1309
1310 const auto fb_state = sync_state_->Get<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer);
1311 assert(fb_state);
1312 if (nullptr == fb_state) return skip;
1313
1314 // Create a limited array of views (which we'll need to toss)
1315 std::vector<const IMAGE_VIEW_STATE *> views;
1316 const auto count_attachment = GetFramebufferAttachments(*pRenderPassBegin, *fb_state);
1317 const auto attachment_count = count_attachment.first;
1318 const auto *attachments = count_attachment.second;
1319 views.resize(attachment_count, nullptr);
1320 for (const auto &transition : transitions) {
1321 assert(transition.attachment < attachment_count);
1322 views[transition.attachment] = sync_state_->Get<IMAGE_VIEW_STATE>(attachments[transition.attachment]);
1323 }
1324
John Zulauf7635de32020-05-29 17:14:15 -06001325 skip |= temp_context.ValidateLayoutTransitions(*sync_state_, rp_state, pRenderPassBegin->renderArea, 0, views, func_name);
1326 skip |= temp_context.ValidateLoadOperation(*sync_state_, rp_state, pRenderPassBegin->renderArea, 0, views, func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06001327 }
1328 return skip;
1329}
1330
locke-lunarg61870c22020-06-09 14:51:50 -06001331bool CommandBufferAccessContext::ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint,
1332 const char *func_name) const {
1333 bool skip = false;
1334 const PIPELINE_STATE *pPipe = nullptr;
1335 const std::vector<LAST_BOUND_STATE::PER_SET> *per_sets = nullptr;
1336 GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(*cb_state_.get(), pipelineBindPoint, &pPipe, &per_sets);
1337 if (!pPipe || !per_sets) {
1338 return skip;
1339 }
1340
1341 using DescriptorClass = cvdescriptorset::DescriptorClass;
1342 using BufferDescriptor = cvdescriptorset::BufferDescriptor;
1343 using ImageDescriptor = cvdescriptorset::ImageDescriptor;
1344 using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
1345 using TexelDescriptor = cvdescriptorset::TexelDescriptor;
1346
1347 for (const auto &stage_state : pPipe->stage_state) {
locke-lunarg37047832020-06-12 13:44:45 -06001348 if (stage_state.stage_flag == VK_SHADER_STAGE_FRAGMENT_BIT && pPipe->graphicsPipelineCI.pRasterizationState &&
locke-lunarge9f1cdf2020-06-12 12:28:57 -06001349 pPipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable)
1350 continue;
locke-lunarg61870c22020-06-09 14:51:50 -06001351 for (const auto &set_binding : stage_state.descriptor_uses) {
1352 cvdescriptorset::DescriptorSet *descriptor_set = (*per_sets)[set_binding.first.first].bound_descriptor_set;
1353 cvdescriptorset::DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(),
1354 set_binding.first.second);
1355 const auto descriptor_type = binding_it.GetType();
1356 cvdescriptorset::IndexRange index_range = binding_it.GetGlobalIndexRange();
1357 auto array_idx = 0;
1358
1359 if (binding_it.IsVariableDescriptorCount()) {
1360 index_range.end = index_range.start + descriptor_set->GetVariableDescriptorCount();
1361 }
1362 SyncStageAccessIndex sync_index =
1363 GetSyncStageAccessIndexsByDescriptorSet(descriptor_type, set_binding.second, stage_state.stage_flag);
1364
1365 for (uint32_t i = index_range.start; i < index_range.end; ++i, ++array_idx) {
1366 uint32_t index = i - index_range.start;
1367 const auto *descriptor = descriptor_set->GetDescriptorFromGlobalIndex(i);
1368 switch (descriptor->GetClass()) {
1369 case DescriptorClass::ImageSampler:
1370 case DescriptorClass::Image: {
1371 const IMAGE_VIEW_STATE *img_view_state = nullptr;
1372 if (descriptor->GetClass() == DescriptorClass::ImageSampler) {
1373 img_view_state = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageViewState();
1374 } else {
1375 img_view_state = static_cast<const ImageDescriptor *>(descriptor)->GetImageViewState();
1376 }
1377 if (!img_view_state) continue;
1378 const IMAGE_STATE *img_state = img_view_state->image_state.get();
1379 VkExtent3D extent = {};
1380 VkOffset3D offset = {};
1381 if (sync_index == SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ) {
1382 extent = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.extent);
1383 offset = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.offset);
1384 } else {
1385 extent = img_state->createInfo.extent;
1386 }
1387 auto hazard = current_context_->DetectHazard(*img_state, sync_index,
1388 img_view_state->normalized_subresource_range, offset, extent);
1389 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06001390 skip |= sync_state_->LogError(
1391 img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
1392 "%s: Hazard %s for %s in %s, %s, and %s binding #%" PRIu32 " index %" PRIu32 ". Prior access %s.",
1393 func_name, string_SyncHazard(hazard.hazard),
1394 sync_state_->report_data->FormatHandle(img_view_state->image_view).c_str(),
1395 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(),
1396 sync_state_->report_data->FormatHandle(pPipe->pipeline).c_str(),
1397 sync_state_->report_data->FormatHandle(descriptor_set->GetSet()).c_str(), set_binding.first.second,
1398 index, string_UsageTag(hazard.tag).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001399 }
1400 break;
1401 }
1402 case DescriptorClass::TexelBuffer: {
1403 auto buf_view_state = static_cast<const TexelDescriptor *>(descriptor)->GetBufferViewState();
1404 if (!buf_view_state) continue;
1405 const BUFFER_STATE *buf_state = buf_view_state->buffer_state.get();
1406 ResourceAccessRange range =
1407 MakeRange(buf_view_state->create_info.offset,
1408 GetRealWholeSize(buf_view_state->create_info.offset, buf_view_state->create_info.range,
1409 buf_state->createInfo.size));
1410 auto hazard = current_context_->DetectHazard(*buf_state, sync_index, range);
1411 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001412 skip |= sync_state_->LogError(
1413 buf_view_state->buffer_view, string_SyncHazardVUID(hazard.hazard),
1414 "%s: Hazard %s for %s in %s, %s, and %s binding #%d index %d. Prior access %s.", func_name,
1415 string_SyncHazard(hazard.hazard),
1416 sync_state_->report_data->FormatHandle(buf_view_state->buffer_view).c_str(),
1417 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(),
1418 sync_state_->report_data->FormatHandle(pPipe->pipeline).c_str(),
1419 sync_state_->report_data->FormatHandle(descriptor_set->GetSet()).c_str(), set_binding.first.second,
1420 index, string_UsageTag(hazard.tag).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001421 }
1422 break;
1423 }
1424 case DescriptorClass::GeneralBuffer: {
1425 const auto *buffer_descriptor = static_cast<const BufferDescriptor *>(descriptor);
1426 auto buf_state = buffer_descriptor->GetBufferState();
1427 if (!buf_state) continue;
1428 ResourceAccessRange range = MakeRange(buffer_descriptor->GetOffset(), buffer_descriptor->GetRange());
1429 auto hazard = current_context_->DetectHazard(*buf_state, sync_index, range);
1430 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001431 skip |= sync_state_->LogError(
1432 buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
1433 "%s: Hazard %s for %s in %s, %s, and %s binding #%d index %d. Prior access %s.", func_name,
1434 string_SyncHazard(hazard.hazard), sync_state_->report_data->FormatHandle(buf_state->buffer).c_str(),
1435 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(),
1436 sync_state_->report_data->FormatHandle(pPipe->pipeline).c_str(),
1437 sync_state_->report_data->FormatHandle(descriptor_set->GetSet()).c_str(), set_binding.first.second,
1438 index, string_UsageTag(hazard.tag).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001439 }
1440 break;
1441 }
1442 // TODO: INLINE_UNIFORM_BLOCK_EXT, ACCELERATION_STRUCTURE_KHR
1443 default:
1444 break;
1445 }
1446 }
1447 }
1448 }
1449 return skip;
1450}
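// Hedged illustration of the descriptor-to-usage mapping consumed above. The real table lives in
// GetSyncStageAccessIndexsByDescriptorSet (defined elsewhere); the pairings below follow the
// SYNC_<STAGE>_<ACCESS> naming used in this file and are assumptions, not a normative list:
//
//   VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER         + VK_SHADER_STAGE_COMPUTE_BIT  -> SYNC_COMPUTE_SHADER_UNIFORM_READ
//   VK_DESCRIPTOR_TYPE_STORAGE_BUFFER (write) + VK_SHADER_STAGE_COMPUTE_BIT  -> SYNC_COMPUTE_SHADER_SHADER_WRITE
//   VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER + VK_SHADER_STAGE_FRAGMENT_BIT -> SYNC_FRAGMENT_SHADER_SHADER_READ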
1451
1452void CommandBufferAccessContext::RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint,
1453 const ResourceUsageTag &tag) {
1454 const PIPELINE_STATE *pPipe = nullptr;
1455 const std::vector<LAST_BOUND_STATE::PER_SET> *per_sets = nullptr;
1456 GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(*cb_state_.get(), pipelineBindPoint, &pPipe, &per_sets);
1457 if (!pPipe || !per_sets) {
1458 return;
1459 }
1460
1461 using DescriptorClass = cvdescriptorset::DescriptorClass;
1462 using BufferDescriptor = cvdescriptorset::BufferDescriptor;
1463 using ImageDescriptor = cvdescriptorset::ImageDescriptor;
1464 using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
1465 using TexelDescriptor = cvdescriptorset::TexelDescriptor;
1466
1467 for (const auto &stage_state : pPipe->stage_state) {
locke-lunarge9f1cdf2020-06-12 12:28:57 -06001468 if (stage_state.stage_flag == VK_SHADER_STAGE_FRAGMENT_BIT && pPipe->graphicsPipelineCI.pRasterizationState &&
1469 pPipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable)
1470 continue;
locke-lunarg61870c22020-06-09 14:51:50 -06001471 for (const auto &set_binding : stage_state.descriptor_uses) {
1472 cvdescriptorset::DescriptorSet *descriptor_set = (*per_sets)[set_binding.first.first].bound_descriptor_set;
1473 cvdescriptorset::DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(),
1474 set_binding.first.second);
1475 const auto descriptor_type = binding_it.GetType();
1476 cvdescriptorset::IndexRange index_range = binding_it.GetGlobalIndexRange();
1477 auto array_idx = 0;
1478
1479 if (binding_it.IsVariableDescriptorCount()) {
1480 index_range.end = index_range.start + descriptor_set->GetVariableDescriptorCount();
1481 }
1482 SyncStageAccessIndex sync_index =
1483 GetSyncStageAccessIndexsByDescriptorSet(descriptor_type, set_binding.second, stage_state.stage_flag);
1484
1485 for (uint32_t i = index_range.start; i < index_range.end; ++i, ++array_idx) {
1486 const auto *descriptor = descriptor_set->GetDescriptorFromGlobalIndex(i);
1487 switch (descriptor->GetClass()) {
1488 case DescriptorClass::ImageSampler:
1489 case DescriptorClass::Image: {
1490 const IMAGE_VIEW_STATE *img_view_state = nullptr;
1491 if (descriptor->GetClass() == DescriptorClass::ImageSampler) {
1492 img_view_state = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageViewState();
1493 } else {
1494 img_view_state = static_cast<const ImageDescriptor *>(descriptor)->GetImageViewState();
1495 }
1496 if (!img_view_state) continue;
1497 const IMAGE_STATE *img_state = img_view_state->image_state.get();
1498 VkExtent3D extent = {};
1499 VkOffset3D offset = {};
1500 if (sync_index == SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ) {
1501 extent = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.extent);
1502 offset = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.offset);
1503 } else {
1504 extent = img_state->createInfo.extent;
1505 }
1506 current_context_->UpdateAccessState(*img_state, sync_index, img_view_state->normalized_subresource_range,
1507 offset, extent, tag);
1508 break;
1509 }
1510 case DescriptorClass::TexelBuffer: {
1511 auto buf_view_state = static_cast<const TexelDescriptor *>(descriptor)->GetBufferViewState();
1512 if (!buf_view_state) continue;
1513 const BUFFER_STATE *buf_state = buf_view_state->buffer_state.get();
1514 ResourceAccessRange range =
1515 MakeRange(buf_view_state->create_info.offset, buf_view_state->create_info.range);
1516 current_context_->UpdateAccessState(*buf_state, sync_index, range, tag);
1517 break;
1518 }
1519 case DescriptorClass::GeneralBuffer: {
1520 const auto *buffer_descriptor = static_cast<const BufferDescriptor *>(descriptor);
1521 auto buf_state = buffer_descriptor->GetBufferState();
1522 if (!buf_state) continue;
1523 ResourceAccessRange range = MakeRange(buffer_descriptor->GetOffset(), buffer_descriptor->GetRange());
1524 current_context_->UpdateAccessState(*buf_state, sync_index, range, tag);
1525 break;
1526 }
1527 // TODO: INLINE_UNIFORM_BLOCK_EXT, ACCELERATION_STRUCTURE_KHR
1528 default:
1529 break;
1530 }
1531 }
1532 }
1533 }
1534}
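// Minimal usage sketch (hypothetical caller, mirroring the PreCallValidate/PostCallRecord split
// this layer uses for draw and dispatch commands; the tag helper name is an assumption):
//
//   // PreCallValidateCmdDispatch(...):
//   skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch");
//   // PostCallRecordCmdDispatch(...):
//   const auto tag = cb_access_context->NextCommandTag(CMD_DISPATCH);  // assumed tag helper
//   cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, tag);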
1535
1536bool CommandBufferAccessContext::ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const char *func_name) const {
1537 bool skip = false;
1538 const auto *pPipe = GetCurrentPipelineFromCommandBuffer(*cb_state_.get(), VK_PIPELINE_BIND_POINT_GRAPHICS);
1539 if (!pPipe) {
1540 return skip;
1541 }
1542
1543 const auto &binding_buffers = cb_state_->current_vertex_buffer_binding_info.vertex_buffer_bindings;
1544 const auto &binding_buffers_size = binding_buffers.size();
1545 const auto &binding_descriptions_size = pPipe->vertex_binding_descriptions_.size();
1546
1547 for (size_t i = 0; i < binding_descriptions_size; ++i) {
1548 const auto &binding_description = pPipe->vertex_binding_descriptions_[i];
1549 if (binding_description.binding < binding_buffers_size) {
1550 const auto &binding_buffer = binding_buffers[binding_description.binding];
1551 if (binding_buffer.buffer == VK_NULL_HANDLE) continue;
1552
1553 auto *buf_state = sync_state_->Get<BUFFER_STATE>(binding_buffer.buffer);
1554 VkDeviceSize range_start = 0;
1555 VkDeviceSize range_size = 0;
1556 GetBufferRange(range_start, range_size, binding_buffer.offset, buf_state->createInfo.size, firstVertex, vertexCount,
1557 binding_description.stride);
1558 ResourceAccessRange range = MakeRange(range_start, range_size);
1559 auto hazard = current_context_->DetectHazard(*buf_state, SYNC_VERTEX_INPUT_VERTEX_ATTRIBUTE_READ, range);
1560 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001561 skip |= sync_state_->LogError(
1562 buf_state->buffer, string_SyncHazardVUID(hazard.hazard), "%s: Hazard %s for vertex %s in %s. Prior access %s.",
1563 func_name, string_SyncHazard(hazard.hazard), sync_state_->report_data->FormatHandle(buf_state->buffer).c_str(),
1564 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(), string_UsageTag(hazard.tag).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001565 }
1566 }
1567 }
1568 return skip;
1569}
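// Hedged worked example of the range computed above, assuming GetBufferRange (defined elsewhere)
// uses the obvious arithmetic clamped to the buffer size:
//
//   binding offset = 256, stride = 16, firstVertex = 4, vertexCount = 10
//   range_start = 256 + 4 * 16 = 320
//   range_size  = 10 * 16      = 160   -> range [320, 480) checked as VERTEX_ATTRIBUTE_READ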
1570
1571void CommandBufferAccessContext::RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const ResourceUsageTag &tag) {
1572 const auto *pPipe = GetCurrentPipelineFromCommandBuffer(*cb_state_.get(), VK_PIPELINE_BIND_POINT_GRAPHICS);
1573 if (!pPipe) {
1574 return;
1575 }
1576 const auto &binding_buffers = cb_state_->current_vertex_buffer_binding_info.vertex_buffer_bindings;
1577 const auto &binding_buffers_size = binding_buffers.size();
1578 const auto &binding_descriptions_size = pPipe->vertex_binding_descriptions_.size();
1579
1580 for (size_t i = 0; i < binding_descriptions_size; ++i) {
1581 const auto &binding_description = pPipe->vertex_binding_descriptions_[i];
1582 if (binding_description.binding < binding_buffers_size) {
1583 const auto &binding_buffer = binding_buffers[binding_description.binding];
1584 if (binding_buffer.buffer == VK_NULL_HANDLE) continue;
1585
1586 auto *buf_state = sync_state_->Get<BUFFER_STATE>(binding_buffer.buffer);
1587 VkDeviceSize range_start = 0;
1588 VkDeviceSize range_size = 0;
1589 GetBufferRange(range_start, range_size, binding_buffer.offset, buf_state->createInfo.size, firstVertex, vertexCount,
1590 binding_description.stride);
1591 ResourceAccessRange range = MakeRange(range_start, range_size);
1592 current_context_->UpdateAccessState(*buf_state, SYNC_VERTEX_INPUT_VERTEX_ATTRIBUTE_READ, range, tag);
1593 }
1594 }
1595}
1596
1597bool CommandBufferAccessContext::ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const char *func_name) const {
1598 bool skip = false;
1599 if (cb_state_->index_buffer_binding.buffer == VK_NULL_HANDLE) return skip;
1600
1601 auto *index_buf_state = sync_state_->Get<BUFFER_STATE>(cb_state_->index_buffer_binding.buffer);
1602 const auto index_size = GetIndexAlignment(cb_state_->index_buffer_binding.index_type);
1603 VkDeviceSize range_start = 0;
1604 VkDeviceSize range_size = 0;
1605 GetBufferRange(range_start, range_size, cb_state_->index_buffer_binding.offset, index_buf_state->createInfo.size, firstIndex,
1606 indexCount, index_size);
1607 ResourceAccessRange range = MakeRange(range_start, range_size);
1608 auto hazard = current_context_->DetectHazard(*index_buf_state, SYNC_VERTEX_INPUT_INDEX_READ, range);
1609 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001610 skip |= sync_state_->LogError(
1611 index_buf_state->buffer, string_SyncHazardVUID(hazard.hazard), "%s: Hazard %s for index %s in %s. Prior access %s.",
1612 func_name, string_SyncHazard(hazard.hazard), sync_state_->report_data->FormatHandle(index_buf_state->buffer).c_str(),
1613 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(), string_UsageTag(hazard.tag).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001614 }
1615
1616 // TODO: For now, we detect against the whole vertex buffer, since the index buffer contents could still change before queue submit.
1617 // We will detect a more accurate range in the future.
1618 skip |= ValidateDrawVertex(UINT32_MAX, 0, func_name);
1619 return skip;
1620}
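// Companion hedged example for the index range, assuming GetIndexAlignment returns the index size
// in bytes (e.g. 2 for VK_INDEX_TYPE_UINT16, 4 for VK_INDEX_TYPE_UINT32):
//
//   binding offset = 0, indexType = VK_INDEX_TYPE_UINT16, firstIndex = 8, indexCount = 36
//   range_start = 0 + 8 * 2 = 16
//   range_size  = 36 * 2    = 72   -> range [16, 88) checked as INDEX_READ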
1621
1622void CommandBufferAccessContext::RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const ResourceUsageTag &tag) {
1623 if (cb_state_->index_buffer_binding.buffer == VK_NULL_HANDLE) return;
1624
1625 auto *index_buf_state = sync_state_->Get<BUFFER_STATE>(cb_state_->index_buffer_binding.buffer);
1626 const auto index_size = GetIndexAlignment(cb_state_->index_buffer_binding.index_type);
1627 VkDeviceSize range_start = 0;
1628 VkDeviceSize range_size = 0;
1629 GetBufferRange(range_start, range_size, cb_state_->index_buffer_binding.offset, index_buf_state->createInfo.size, firstIndex,
1630 indexCount, index_size);
1631 ResourceAccessRange range = MakeRange(range_start, range_size);
1632 current_context_->UpdateAccessState(*index_buf_state, SYNC_VERTEX_INPUT_INDEX_READ, range, tag);
1633
1634 // TODO: For now, we record the whole vertex buffer, since the index buffer contents could still change before queue submit.
1635 // We will record a more accurate range in the future.
1636 RecordDrawVertex(UINT32_MAX, 0, tag);
1637}
1638
1639bool CommandBufferAccessContext::ValidateDrawSubpassAttachment(const char *func_name) const {
locke-lunarg7077d502020-06-18 21:37:26 -06001640 bool skip = false;
1641 if (!current_renderpass_context_) return skip;
1642 skip |= current_renderpass_context_->ValidateDrawSubpassAttachment(*sync_state_, *cb_state_.get(),
1643 cb_state_->activeRenderPassBeginInfo.renderArea, func_name);
1644 return skip;
locke-lunarg61870c22020-06-09 14:51:50 -06001645}
1646
1647void CommandBufferAccessContext::RecordDrawSubpassAttachment(const ResourceUsageTag &tag) {
locke-lunarg7077d502020-06-18 21:37:26 -06001648 if (current_renderpass_context_)
1649 current_renderpass_context_->RecordDrawSubpassAttachment(*cb_state_.get(), cb_state_->activeRenderPassBeginInfo.renderArea,
1650 tag);
locke-lunarg61870c22020-06-09 14:51:50 -06001651}
1652
John Zulauf355e49b2020-04-24 15:11:15 -06001653bool CommandBufferAccessContext::ValidateNextSubpass(const char *func_name) const {
John Zulauf355e49b2020-04-24 15:11:15 -06001654 bool skip = false;
locke-lunarg7077d502020-06-18 21:37:26 -06001655 if (!current_renderpass_context_) return skip;
John Zulauf1507ee42020-05-18 11:33:09 -06001656 skip |=
1657 current_renderpass_context_->ValidateNextSubpass(*sync_state_, cb_state_->activeRenderPassBeginInfo.renderArea, func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06001658
1659 return skip;
1660}
1661
1662bool CommandBufferAccessContext::ValidateEndRenderpass(const char *func_name) const {
1663 // TODO: Things to add here.
John Zulauf7635de32020-05-29 17:14:15 -06001664 // Validate Preserve attachments
John Zulauf355e49b2020-04-24 15:11:15 -06001665 bool skip = false;
locke-lunarg7077d502020-06-18 21:37:26 -06001666 if (!current_renderpass_context_) return skip;
John Zulauf7635de32020-05-29 17:14:15 -06001667 skip |= current_renderpass_context_->ValidateEndRenderPass(*sync_state_, cb_state_->activeRenderPassBeginInfo.renderArea,
1668 func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06001669
1670 return skip;
1671}
1672
1673void CommandBufferAccessContext::RecordBeginRenderPass(const ResourceUsageTag &tag) {
1674 assert(sync_state_);
1675 if (!cb_state_) return;
1676
1677 // Create an access context for the current renderpass.
1678 render_pass_contexts_.emplace_back(&cb_access_context_);
John Zulauf16adfc92020-04-08 10:28:33 -06001679 current_renderpass_context_ = &render_pass_contexts_.back();
John Zulauf355e49b2020-04-24 15:11:15 -06001680 current_renderpass_context_->RecordBeginRenderPass(*sync_state_, *cb_state_, queue_flags_, tag);
John Zulauf16adfc92020-04-08 10:28:33 -06001681 current_context_ = &current_renderpass_context_->CurrentContext();
John Zulauf16adfc92020-04-08 10:28:33 -06001682}
1683
John Zulauf355e49b2020-04-24 15:11:15 -06001684void CommandBufferAccessContext::RecordNextSubpass(const RENDER_PASS_STATE &rp_state, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001685 assert(current_renderpass_context_);
John Zulauf1507ee42020-05-18 11:33:09 -06001686 current_renderpass_context_->RecordNextSubpass(cb_state_->activeRenderPassBeginInfo.renderArea, tag);
John Zulauf16adfc92020-04-08 10:28:33 -06001687 current_context_ = &current_renderpass_context_->CurrentContext();
1688}
1689
John Zulauf355e49b2020-04-24 15:11:15 -06001690void CommandBufferAccessContext::RecordEndRenderPass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001691 assert(current_renderpass_context_);
1692 if (!current_renderpass_context_) return;
1693
John Zulauf7635de32020-05-29 17:14:15 -06001694 current_renderpass_context_->RecordEndRenderPass(cb_state_->activeRenderPassBeginInfo.renderArea, tag);
John Zulauf355e49b2020-04-24 15:11:15 -06001695 current_context_ = &cb_access_context_;
John Zulauf16adfc92020-04-08 10:28:33 -06001696 current_renderpass_context_ = nullptr;
1697}
1698
locke-lunarg61870c22020-06-09 14:51:50 -06001699bool RenderPassAccessContext::ValidateDrawSubpassAttachment(const SyncValidator &sync_state, const CMD_BUFFER_STATE &cmd,
1700 const VkRect2D &render_area, const char *func_name) const {
1701 bool skip = false;
locke-lunarg96dc9632020-06-10 17:22:18 -06001702 const auto *pPipe = GetCurrentPipelineFromCommandBuffer(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunarge9f1cdf2020-06-12 12:28:57 -06001703 if (!pPipe ||
1704 (pPipe->graphicsPipelineCI.pRasterizationState && pPipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable)) {
locke-lunarg96dc9632020-06-10 17:22:18 -06001705 return skip;
1706 }
1707 const auto &list = pPipe->fragmentShader_writable_output_location_list;
locke-lunarg61870c22020-06-09 14:51:50 -06001708 const auto &subpass = rp_state_->createInfo.pSubpasses[current_subpass_];
1709 VkExtent3D extent = CastTo3D(render_area.extent);
1710 VkOffset3D offset = CastTo3D(render_area.offset);
locke-lunarg37047832020-06-12 13:44:45 -06001711
locke-lunarg44f9bb12020-06-10 14:43:57 -06001712 // The subpass's input attachments are handled in ValidateDispatchDrawDescriptorSet
locke-lunarg96dc9632020-06-10 17:22:18 -06001713 if (subpass.pColorAttachments && subpass.colorAttachmentCount && !list.empty()) {
1714 for (const auto location : list) {
1715 if (location >= subpass.colorAttachmentCount || subpass.pColorAttachments[location].attachment == VK_ATTACHMENT_UNUSED)
1716 continue;
1717 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pColorAttachments[location].attachment];
1718 HazardResult hazard = external_context_->DetectHazard(
1719 img_view_state, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, kColorAttachmentRasterOrder, offset, extent);
1720 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001721 skip |= sync_state.LogError(img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
1722 "%s: Hazard %s for %s in %s, Subpass #%d, and pColorAttachments #%d. Prior access %s.",
1723 func_name, string_SyncHazard(hazard.hazard),
1724 sync_state.report_data->FormatHandle(img_view_state->image_view).c_str(),
1725 sync_state.report_data->FormatHandle(cmd.commandBuffer).c_str(), cmd.activeSubpass,
1726 location, string_UsageTag(hazard.tag).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001727 }
1728 }
1729 }
locke-lunarg37047832020-06-12 13:44:45 -06001730
1731 // PHASE1 TODO: Add layout-based read vs. write selection.
1732 // PHASE1 TODO: Read operations for both depth and stencil are possible in the future.
1733 if (pPipe->graphicsPipelineCI.pDepthStencilState && subpass.pDepthStencilAttachment &&
1734 subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
locke-lunarg61870c22020-06-09 14:51:50 -06001735 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pDepthStencilAttachment->attachment];
locke-lunarg37047832020-06-12 13:44:45 -06001736 bool depth_write = false, stencil_write = false;
1737
1738 // PHASE1 TODO: These validations should be in core_checks.
1739 if (!FormatIsStencilOnly(img_view_state->create_info.format) &&
1740 pPipe->graphicsPipelineCI.pDepthStencilState->depthTestEnable &&
1741 pPipe->graphicsPipelineCI.pDepthStencilState->depthWriteEnable &&
1742 IsImageLayoutDepthWritable(subpass.pDepthStencilAttachment->layout)) {
1743 depth_write = true;
1744 }
1745 // PHASE1 TODO: It needs to check whether the stencil is writable.
1746 // If failOp, passOp, or depthFailOp is not KEEP, and writeMask isn't 0, it's writable.
1747 // If the depth test is disabled, it is treated as passing, so depthFailOp doesn't run.
1748 // PHASE1 TODO: These validations should be in core_checks.
1749 if (!FormatIsDepthOnly(img_view_state->create_info.format) &&
1750 pPipe->graphicsPipelineCI.pDepthStencilState->stencilTestEnable &&
1751 IsImageLayoutStencilWritable(subpass.pDepthStencilAttachment->layout)) {
1752 stencil_write = true;
1753 }
1754
1755 // PHASE1 TODO: Add EARLY stage detection based on ExecutionMode.
1756 if (depth_write) {
1757 HazardResult hazard =
1758 external_context_->DetectHazard(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE,
1759 kDepthStencilAttachmentRasterOrder, offset, extent, VK_IMAGE_ASPECT_DEPTH_BIT);
1760 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001761 skip |= sync_state.LogError(
1762 img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
1763 "%s: Hazard %s for %s in %s, Subpass #%d, and depth part of pDepthStencilAttachment. Prior access %s.",
1764 func_name, string_SyncHazard(hazard.hazard),
1765 sync_state.report_data->FormatHandle(img_view_state->image_view).c_str(),
1766 sync_state.report_data->FormatHandle(cmd.commandBuffer).c_str(), cmd.activeSubpass,
1767 string_UsageTag(hazard.tag).c_str());
locke-lunarg37047832020-06-12 13:44:45 -06001768 }
1769 }
1770 if (stencil_write) {
1771 HazardResult hazard =
1772 external_context_->DetectHazard(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE,
1773 kDepthStencilAttachmentRasterOrder, offset, extent, VK_IMAGE_ASPECT_STENCIL_BIT);
1774 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001775 skip |= sync_state.LogError(
1776 img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
1777 "%s: Hazard %s for %s in %s, Subpass #%d, and stencil part of pDepthStencilAttachment. Prior access %s.",
1778 func_name, string_SyncHazard(hazard.hazard),
1779 sync_state.report_data->FormatHandle(img_view_state->image_view).c_str(),
1780 sync_state.report_data->FormatHandle(cmd.commandBuffer).c_str(), cmd.activeSubpass,
1781 string_UsageTag(hazard.tag).c_str());
locke-lunarg37047832020-06-12 13:44:45 -06001782 }
locke-lunarg61870c22020-06-09 14:51:50 -06001783 }
1784 }
1785 return skip;
1786}
1787
locke-lunarg96dc9632020-06-10 17:22:18 -06001788void RenderPassAccessContext::RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area,
1789 const ResourceUsageTag &tag) {
1790 const auto *pPipe = GetCurrentPipelineFromCommandBuffer(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunarge9f1cdf2020-06-12 12:28:57 -06001791 if (!pPipe ||
1792 (pPipe->graphicsPipelineCI.pRasterizationState && pPipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable)) {
locke-lunarg96dc9632020-06-10 17:22:18 -06001793 return;
1794 }
1795 const auto &list = pPipe->fragmentShader_writable_output_location_list;
locke-lunarg61870c22020-06-09 14:51:50 -06001796 const auto &subpass = rp_state_->createInfo.pSubpasses[current_subpass_];
1797 VkExtent3D extent = CastTo3D(render_area.extent);
1798 VkOffset3D offset = CastTo3D(render_area.offset);
1799
locke-lunarg44f9bb12020-06-10 14:43:57 -06001800 // The subpass's input attachments are handled in RecordDispatchDrawDescriptorSet
locke-lunarg96dc9632020-06-10 17:22:18 -06001801 if (subpass.pColorAttachments && subpass.colorAttachmentCount && !list.empty()) {
1802 for (const auto location : list) {
1803 if (location >= subpass.colorAttachmentCount || subpass.pColorAttachments[location].attachment == VK_ATTACHMENT_UNUSED)
1804 continue;
1805 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pColorAttachments[location].attachment];
1806 external_context_->UpdateAccessState(img_view_state, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, offset,
1807 extent, 0, tag);
locke-lunarg61870c22020-06-09 14:51:50 -06001808 }
1809 }
locke-lunarg37047832020-06-12 13:44:45 -06001810
1811 // PHASE1 TODO: Add layout-based read vs. write selection.
1812 // PHASE1 TODO: Read operations for both depth and stencil are possible in the future.
1813 if (pPipe->graphicsPipelineCI.pDepthStencilState && subpass.pDepthStencilAttachment &&
1814 subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
locke-lunarg61870c22020-06-09 14:51:50 -06001815 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pDepthStencilAttachment->attachment];
locke-lunarg37047832020-06-12 13:44:45 -06001816 bool depth_write = false, stencil_write = false;
1817
1818 // PHASE1 TODO: These validations should be in core_checks.
1819 if (!FormatIsStencilOnly(img_view_state->create_info.format) &&
1820 pPipe->graphicsPipelineCI.pDepthStencilState->depthTestEnable &&
1821 pPipe->graphicsPipelineCI.pDepthStencilState->depthWriteEnable &&
1822 IsImageLayoutDepthWritable(subpass.pDepthStencilAttachment->layout)) {
1823 depth_write = true;
1824 }
1825 // PHASE1 TODO: It needs to check whether the stencil is writable.
1826 // If failOp, passOp, or depthFailOp is not KEEP, and writeMask isn't 0, it's writable.
1827 // If the depth test is disabled, it is treated as passing, so depthFailOp doesn't run.
1828 // PHASE1 TODO: These validations should be in core_checks.
1829 if (!FormatIsDepthOnly(img_view_state->create_info.format) &&
1830 pPipe->graphicsPipelineCI.pDepthStencilState->stencilTestEnable &&
1831 IsImageLayoutStencilWritable(subpass.pDepthStencilAttachment->layout)) {
1832 stencil_write = true;
1833 }
1834
1835 // PHASE1 TODO: Add EARLY stage detection based on ExecutionMode.
1836 if (depth_write) {
1837 external_context_->UpdateAccessState(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, offset,
1838 extent, VK_IMAGE_ASPECT_DEPTH_BIT, tag);
1839 }
1840 if (stencil_write) {
1841 external_context_->UpdateAccessState(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, offset,
1842 extent, VK_IMAGE_ASPECT_STENCIL_BIT, tag);
1843 }
locke-lunarg61870c22020-06-09 14:51:50 -06001844 }
1845}
1846
John Zulauf1507ee42020-05-18 11:33:09 -06001847bool RenderPassAccessContext::ValidateNextSubpass(const SyncValidator &sync_state, const VkRect2D &render_area,
1848 const char *func_name) const {
John Zulaufaff20662020-06-01 14:07:58 -06001849 // PHASE1 TODO: Add Validate Preserve attachments
John Zulauf355e49b2020-04-24 15:11:15 -06001850 bool skip = false;
John Zulaufb027cdb2020-05-21 14:25:22 -06001851 skip |= CurrentContext().ValidateResolveOperations(sync_state, *rp_state_, render_area, attachment_views_, func_name,
1852 current_subpass_);
John Zulaufaff20662020-06-01 14:07:58 -06001853 skip |= CurrentContext().ValidateStoreOperation(sync_state, *rp_state_, render_area, current_subpass_, attachment_views_,
1854 func_name);
1855
John Zulauf355e49b2020-04-24 15:11:15 -06001856 const auto next_subpass = current_subpass_ + 1;
John Zulauf1507ee42020-05-18 11:33:09 -06001857 const auto &next_context = subpass_contexts_[next_subpass];
John Zulauf7635de32020-05-29 17:14:15 -06001858 skip |= next_context.ValidateLayoutTransitions(sync_state, *rp_state_, render_area, next_subpass, attachment_views_, func_name);
1859 skip |= next_context.ValidateLoadOperation(sync_state, *rp_state_, render_area, next_subpass, attachment_views_, func_name);
1860 return skip;
1861}
1862bool RenderPassAccessContext::ValidateEndRenderPass(const SyncValidator &sync_state, const VkRect2D &render_area,
1863 const char *func_name) const {
John Zulaufaff20662020-06-01 14:07:58 -06001864 // PHASE1 TODO: Validate Preserve
John Zulauf7635de32020-05-29 17:14:15 -06001865 bool skip = false;
1866 skip |= CurrentContext().ValidateResolveOperations(sync_state, *rp_state_, render_area, attachment_views_, func_name,
1867 current_subpass_);
John Zulaufaff20662020-06-01 14:07:58 -06001868 skip |= CurrentContext().ValidateStoreOperation(sync_state, *rp_state_, render_area, current_subpass_, attachment_views_,
1869 func_name);
John Zulauf7635de32020-05-29 17:14:15 -06001870 skip |= ValidateFinalSubpassLayoutTransitions(sync_state, render_area, func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06001871 return skip;
1872}
1873
John Zulauf7635de32020-05-29 17:14:15 -06001874AccessContext *RenderPassAccessContext::CreateStoreResolveProxy(const VkRect2D &render_area) const {
1875 return CreateStoreResolveProxyContext(CurrentContext(), *rp_state_, current_subpass_, render_area, attachment_views_);
1876}
1877
1878bool RenderPassAccessContext::ValidateFinalSubpassLayoutTransitions(const SyncValidator &sync_state, const VkRect2D &render_area,
1879 const char *func_name) const {
John Zulauf355e49b2020-04-24 15:11:15 -06001880 bool skip = false;
1881
John Zulauf7635de32020-05-29 17:14:15 -06001882 // As validation methods are const and precede the record/update phase, for any transitions from the current (last)
1883 // subpass, we have to validate them against a copy of the current AccessContext, with resolve operations applied.
1884 // Note: we could be more efficient by tracking whether or not we actually *have* any changes (e.g. attachment resolve)
1885 // to apply and only copy then, if this proves a hot spot.
1886 std::unique_ptr<AccessContext> proxy_for_current;
1887
John Zulauf355e49b2020-04-24 15:11:15 -06001888 // Validate the "finalLayout" transitions to external
1889 // Get them from where they're hiding in the extra entry.
1890 const auto &final_transitions = rp_state_->subpass_transitions.back();
1891 for (const auto &transition : final_transitions) {
1892 const auto &attach_view = attachment_views_[transition.attachment];
1893 const auto &trackback = subpass_contexts_[transition.prev_pass].GetDstExternalTrackBack();
1894 assert(trackback.context); // Transitions are given implicit transitions if the StateTracker is working correctly
John Zulauf7635de32020-05-29 17:14:15 -06001895 auto *context = trackback.context;
1896
1897 if (transition.prev_pass == current_subpass_) {
1898 if (!proxy_for_current) {
1899 // We haven't recorded the resolve operations for the current_subpass, so we need to copy current and update it *as if* they had been recorded
1900 proxy_for_current.reset(CreateStoreResolveProxy(render_area));
1901 }
1902 context = proxy_for_current.get();
1903 }
1904
1905 auto hazard = context->DetectImageBarrierHazard(
John Zulauf355e49b2020-04-24 15:11:15 -06001906 *attach_view->image_state, trackback.barrier.src_exec_scope, trackback.barrier.src_access_scope,
1907 attach_view->normalized_subresource_range, AccessContext::DetectOptions::kDetectPrevious);
1908 if (hazard.hazard) {
1909 skip |= sync_state.LogError(rp_state_->renderPass, string_SyncHazardVUID(hazard.hazard),
1910 "%s: Hazard %s with last use subpass %" PRIu32 " for attachment %" PRIu32
John Zulauf1dae9192020-06-16 15:46:44 -06001911 " final image layout transition. Prior access %s.",
1912 func_name, string_SyncHazard(hazard.hazard), transition.prev_pass, transition.attachment,
1913 string_UsageTag(hazard.tag).c_str());
John Zulauf355e49b2020-04-24 15:11:15 -06001914 }
1915 }
1916 return skip;
1917}
1918
1919void RenderPassAccessContext::RecordLayoutTransitions(const ResourceUsageTag &tag) {
1920 // Add layout transitions...
1921 const auto &transitions = rp_state_->subpass_transitions[current_subpass_];
1922 auto &subpass_context = subpass_contexts_[current_subpass_];
John Zulaufc9201222020-05-13 15:13:03 -06001923 std::set<const IMAGE_VIEW_STATE *> view_seen;
John Zulauf355e49b2020-04-24 15:11:15 -06001924 for (const auto &transition : transitions) {
1925 const auto attachment_view = attachment_views_[transition.attachment];
1926 if (!attachment_view) continue;
1927 const auto image = attachment_view->image_state.get();
1928 if (!image) continue;
1929
1930 const auto *barrier = subpass_context.GetTrackBackFromSubpass(transition.prev_pass);
John Zulaufc9201222020-05-13 15:13:03 -06001931 auto insert_pair = view_seen.insert(attachment_view);
1932 if (insert_pair.second) {
1933 // We haven't recorded the transition yet, so treat this as a normal barrier with a transition.
1934 subpass_context.ApplyImageBarrier(*image, barrier->barrier, attachment_view->normalized_subresource_range, true, tag);
1935
1936 } else {
1937 // We've recorded the transition, but we need to add on the additional dest barriers, and rerecording the transition
1938 // would clear out the prior barrier flags, so apply this as a *non* transition barrier
1939 auto barrier_to_transition = barrier->barrier;
1940 barrier_to_transition.src_access_scope |= SYNC_IMAGE_LAYOUT_TRANSITION_BIT;
1941            subpass_context.ApplyImageBarrier(*image, barrier_to_transition, attachment_view->normalized_subresource_range, false, tag);
1942 }
John Zulauf355e49b2020-04-24 15:11:15 -06001943 }
1944}
1945
John Zulauf1507ee42020-05-18 11:33:09 -06001946void RenderPassAccessContext::RecordLoadOperations(const VkRect2D &render_area, const ResourceUsageTag &tag) {
1947 const auto *attachment_ci = rp_state_->createInfo.pAttachments;
1948 auto &subpass_context = subpass_contexts_[current_subpass_];
1949 VkExtent3D extent = CastTo3D(render_area.extent);
1950 VkOffset3D offset = CastTo3D(render_area.offset);
1951
1952 for (uint32_t i = 0; i < rp_state_->createInfo.attachmentCount; i++) {
1953 if (rp_state_->attachment_first_subpass[i] == current_subpass_) {
1954 if (attachment_views_[i] == nullptr) continue; // UNUSED
1955 const auto &view = *attachment_views_[i];
1956 const IMAGE_STATE *image = view.image_state.get();
1957 if (image == nullptr) continue;
1958
1959 const auto &ci = attachment_ci[i];
1960 const bool has_depth = FormatHasDepth(ci.format);
John Zulaufb027cdb2020-05-21 14:25:22 -06001961 const bool has_stencil = FormatHasStencil(ci.format);
John Zulauf1507ee42020-05-18 11:33:09 -06001962 const bool is_color = !(has_depth || has_stencil);
1963
1964 if (is_color) {
1965 subpass_context.UpdateAccessState(*image, ColorLoadUsage(ci.loadOp), view.normalized_subresource_range, offset,
1966 extent, tag);
1967 } else {
1968 auto update_range = view.normalized_subresource_range;
1969 if (has_depth) {
1970 update_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
1971 subpass_context.UpdateAccessState(*image, DepthStencilLoadUsage(ci.loadOp), update_range, offset, extent, tag);
1972 }
1973 if (has_stencil) {
1974 update_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
1975 subpass_context.UpdateAccessState(*image, DepthStencilLoadUsage(ci.stencilLoadOp), update_range, offset, extent,
1976 tag);
1977 }
1978 }
1979 }
1980 }
1981}
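
// [Illustrative sketch, not part of the validator -- kept inside #if 0 so it cannot affect the build.]
// RecordLoadOperations picks a usage via ColorLoadUsage/DepthStencilLoadUsage because, per the Vulkan
// spec, LOAD_OP_LOAD behaves as a read of the attachment while CLEAR and DONT_CARE behave as writes.
// A standalone sketch of that classification; the IsLoadOpWrite helper is hypothetical:
#if 0
#include <vulkan/vulkan.h>
#include <cassert>

// Classify a loadOp as a write-like or read-like first access to the attachment.
static bool IsLoadOpWrite(VkAttachmentLoadOp op) {
    // CLEAR and DONT_CARE overwrite the attachment contents; LOAD reads them.
    return (op == VK_ATTACHMENT_LOAD_OP_CLEAR) || (op == VK_ATTACHMENT_LOAD_OP_DONT_CARE);
}

int main() {
    assert(!IsLoadOpWrite(VK_ATTACHMENT_LOAD_OP_LOAD));
    assert(IsLoadOpWrite(VK_ATTACHMENT_LOAD_OP_CLEAR));
    return 0;
}
#endif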
1982
John Zulauf355e49b2020-04-24 15:11:15 -06001983void RenderPassAccessContext::RecordBeginRenderPass(const SyncValidator &state, const CMD_BUFFER_STATE &cb_state,
1984 VkQueueFlags queue_flags, const ResourceUsageTag &tag) {
1985 current_subpass_ = 0;
locke-lunargaecf2152020-05-12 17:15:41 -06001986 rp_state_ = cb_state.activeRenderPass.get();
John Zulauf355e49b2020-04-24 15:11:15 -06001987 subpass_contexts_.reserve(rp_state_->createInfo.subpassCount);
1988 // Add this for all subpasses here so that they exist during next subpass validation
1989 for (uint32_t pass = 0; pass < rp_state_->createInfo.subpassCount; pass++) {
1990 subpass_contexts_.emplace_back(pass, queue_flags, rp_state_->subpass_dependencies, subpass_contexts_, external_context_);
1991 }
1992 attachment_views_ = state.GetCurrentAttachmentViews(cb_state);
1993
1994 RecordLayoutTransitions(tag);
John Zulauf1507ee42020-05-18 11:33:09 -06001995 RecordLoadOperations(cb_state.activeRenderPassBeginInfo.renderArea, tag);
John Zulauf355e49b2020-04-24 15:11:15 -06001996}
John Zulauf1507ee42020-05-18 11:33:09 -06001997
1998void RenderPassAccessContext::RecordNextSubpass(const VkRect2D &render_area, const ResourceUsageTag &tag) {
John Zulauf7635de32020-05-29 17:14:15 -06001999 // Resolves are against *prior* subpass context and thus *before* the subpass increment
2000 CurrentContext().UpdateAttachmentResolveAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulaufaff20662020-06-01 14:07:58 -06002001 CurrentContext().UpdateAttachmentStoreAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulauf7635de32020-05-29 17:14:15 -06002002
John Zulauf355e49b2020-04-24 15:11:15 -06002003 current_subpass_++;
2004 assert(current_subpass_ < subpass_contexts_.size());
2005 RecordLayoutTransitions(tag);
John Zulauf1507ee42020-05-18 11:33:09 -06002006 RecordLoadOperations(render_area, tag);
John Zulauf355e49b2020-04-24 15:11:15 -06002007}
2008
John Zulauf7635de32020-05-29 17:14:15 -06002009void RenderPassAccessContext::RecordEndRenderPass(const VkRect2D &render_area, const ResourceUsageTag &tag) {
John Zulaufaff20662020-06-01 14:07:58 -06002010 // Add the resolve and store accesses
John Zulauf7635de32020-05-29 17:14:15 -06002011 CurrentContext().UpdateAttachmentResolveAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulaufaff20662020-06-01 14:07:58 -06002012 CurrentContext().UpdateAttachmentStoreAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulauf7635de32020-05-29 17:14:15 -06002013
John Zulauf355e49b2020-04-24 15:11:15 -06002014 // Export the accesses from the renderpass...
2015 external_context_->ResolveChildContexts(subpass_contexts_);
2016
2017 // Add the "finalLayout" transitions to external
2018 // Get them from where they're hiding in the extra entry.
2019 const auto &final_transitions = rp_state_->subpass_transitions.back();
2020 for (const auto &transition : final_transitions) {
2021 const auto &attachment = attachment_views_[transition.attachment];
2022 const auto &last_trackback = subpass_contexts_[transition.prev_pass].GetDstExternalTrackBack();
2023 assert(external_context_ == last_trackback.context);
2024 external_context_->ApplyImageBarrier(*attachment->image_state, last_trackback.barrier,
2025 attachment->normalized_subresource_range, true, tag);
2026 }
2027}
2028
John Zulauf3d84f1b2020-03-09 13:33:25 -06002029SyncBarrier::SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &barrier) {
2030 const auto src_stage_mask = ExpandPipelineStages(queue_flags, barrier.srcStageMask);
2031 src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
2032 src_access_scope = SyncStageAccess::AccessScope(src_stage_mask, barrier.srcAccessMask);
2033 const auto dst_stage_mask = ExpandPipelineStages(queue_flags, barrier.dstStageMask);
2034 dst_exec_scope = WithLaterPipelineStages(dst_stage_mask);
2035 dst_access_scope = SyncStageAccess::AccessScope(dst_stage_mask, barrier.dstAccessMask);
2036}
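
// [Illustrative sketch, not part of the validator -- kept inside #if 0 so it cannot affect the build.]
// The constructor above expands a subpass dependency into execution scopes: a source scope also covers
// logically earlier pipeline stages, and a destination scope also covers logically later ones. A toy
// model of that expansion on a made-up, strictly ordered five-stage pipeline (mask values and helper
// names are hypothetical, not Vulkan's):
#if 0
#include <cassert>
#include <cstdint>

// Bit i is stage i; earlier stages have smaller i.
enum : uint32_t { kTop = 1u << 0, kVertex = 1u << 1, kFragment = 1u << 2, kColorOut = 1u << 3, kBottom = 1u << 4 };
static const uint32_t kAllStages = kTop | kVertex | kFragment | kColorOut | kBottom;

// Source scopes also cover logically earlier stages...
static uint32_t WithEarlierStages(uint32_t mask) {
    uint32_t result = mask;
    for (uint32_t bit = 1; bit && bit <= kBottom; bit <<= 1) {
        if (mask & bit) result |= (bit - 1);  // fold in every lower (earlier) stage bit
    }
    return result & kAllStages;
}

// ...and destination scopes also cover logically later stages.
static uint32_t WithLaterStages(uint32_t mask) {
    uint32_t result = mask;
    for (uint32_t bit = 1; bit && bit <= kBottom; bit <<= 1) {
        if (mask & bit) result |= ~((bit << 1) - 1);  // fold in every higher (later) stage bit
    }
    return result & kAllStages;
}

int main() {
    // A srcStageMask of "fragment" implicitly reaches back to everything at or before fragment shading,
    assert(WithEarlierStages(kFragment) == (kTop | kVertex | kFragment));
    // and a dstStageMask of "fragment" reaches forward to everything at or after it.
    assert(WithLaterStages(kFragment) == (kFragment | kColorOut | kBottom));
    return 0;
}
#endif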
2037
2038void ResourceAccessState::ApplyBarrier(const SyncBarrier &barrier) {
2039 ApplyExecutionBarrier(barrier.src_exec_scope, barrier.dst_exec_scope);
2040 ApplyMemoryAccessBarrier(barrier.src_exec_scope, barrier.src_access_scope, barrier.dst_exec_scope, barrier.dst_access_scope);
2041}
2042
John Zulauf9cb530d2019-09-30 14:14:10 -06002043HazardResult ResourceAccessState::DetectHazard(SyncStageAccessIndex usage_index) const {
2044 HazardResult hazard;
2045 auto usage = FlagBit(usage_index);
2046 if (IsRead(usage)) {
John Zulaufc9201222020-05-13 15:13:03 -06002047 if (last_write && IsWriteHazard(usage)) {
John Zulauf9cb530d2019-09-30 14:14:10 -06002048 hazard.Set(READ_AFTER_WRITE, write_tag);
2049 }
2050 } else {
2051 // Assume write
2052 // TODO determine what to do with READ-WRITE usage states if any
2053 // Write-After-Write check -- if we have a previous write to test against
2054 if (last_write && IsWriteHazard(usage)) {
2055 hazard.Set(WRITE_AFTER_WRITE, write_tag);
2056 } else {
John Zulauf69133422020-05-20 14:55:53 -06002057 // Look for a casus belli for a WAR hazard
John Zulauf9cb530d2019-09-30 14:14:10 -06002058 const auto usage_stage = PipelineStageBit(usage_index);
2059 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2060 if (IsReadHazard(usage_stage, last_reads[read_index])) {
2061 hazard.Set(WRITE_AFTER_READ, last_reads[read_index].tag);
2062 break;
2063 }
2064 }
2065 }
2066 }
2067 return hazard;
2068}
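
// [Illustrative sketch, not part of the validator -- kept inside #if 0 so it cannot affect the build.]
// DetectHazard above applies the core per-resource rule: a read hazards against a prior write that has
// not been made visible to it (RAW), and a write hazards against an unguarded prior write (WAW) or an
// unguarded prior read (WAR). A toy model with simplified stand-in state (types and masks hypothetical):
#if 0
#include <cassert>
#include <cstdint>

enum class ToyHazard { None, ReadAfterWrite, WriteAfterWrite, WriteAfterRead };

struct ToyAccessState {
    bool has_write = false;       // was there a previous write?
    uint32_t write_barriers = 0;  // stages the prior write has been made visible/available to
    uint32_t read_stages = 0;     // stages that have read since that write
    uint32_t read_barriers = 0;   // stages already execution-ordered after those reads

    ToyHazard Detect(bool is_read, uint32_t stage) const {
        const bool write_unguarded = has_write && !(write_barriers & stage);
        if (is_read) return write_unguarded ? ToyHazard::ReadAfterWrite : ToyHazard::None;
        if (write_unguarded) return ToyHazard::WriteAfterWrite;
        if (read_stages && !(read_barriers & stage)) return ToyHazard::WriteAfterRead;
        return ToyHazard::None;
    }
};

int main() {
    ToyAccessState state;
    state.has_write = true;  // e.g. an unbarriered transfer write
    assert(state.Detect(true, 0x2) == ToyHazard::ReadAfterWrite);
    state.write_barriers = 0x2 | 0x4;  // a barrier made the write visible to both stages used below
    assert(state.Detect(true, 0x2) == ToyHazard::None);
    state.read_stages = 0x2;  // that stage has now read the resource
    assert(state.Detect(false, 0x4) == ToyHazard::WriteAfterRead);  // unordered write after that read
    return 0;
}
#endif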
2069
John Zulauf69133422020-05-20 14:55:53 -06002070HazardResult ResourceAccessState::DetectHazard(SyncStageAccessIndex usage_index, const SyncOrderingBarrier &ordering) const {
2071 // The ordering guarantees act as barriers to the last accesses, independent of synchronization operations
2072 HazardResult hazard;
2073 const auto usage = FlagBit(usage_index);
2074 const bool write_is_ordered = (last_write & ordering.access_scope) == last_write; // Is true if no write, and that's good.
2075 if (IsRead(usage)) {
2076 if (!write_is_ordered && IsWriteHazard(usage)) {
2077 hazard.Set(READ_AFTER_WRITE, write_tag);
2078 }
2079 } else {
2080 if (!write_is_ordered && IsWriteHazard(usage)) {
2081 hazard.Set(WRITE_AFTER_WRITE, write_tag);
2082 } else {
2083 const auto usage_stage = PipelineStageBit(usage_index);
2084 const auto unordered_reads = last_read_stages & ~ordering.exec_scope;
2085 if (unordered_reads) {
2086 // Look for any WAR hazards outside the ordered set of stages
2087 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2088 if (last_reads[read_index].stage & unordered_reads) {
2089 if (IsReadHazard(usage_stage, last_reads[read_index])) {
2090 hazard.Set(WRITE_AFTER_READ, last_reads[read_index].tag);
2091 break;
2092 }
2093 }
2094 }
2095 }
2096 }
2097 }
2098 return hazard;
2099}
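
// [Illustrative sketch, not part of the validator -- kept inside #if 0 so it cannot affect the build.]
// The ordering variant above treats accesses inside the ordering scopes (e.g. rasterization order for
// attachment accesses) as already ordered, so only the leftovers are hazard-checked. A toy model of the
// write_is_ordered and unordered_reads tests (names and mask values are hypothetical):
#if 0
#include <cassert>
#include <cstdint>

struct ToyOrdering {
    uint32_t exec_scope;    // stages ordered by the implicit guarantee
    uint32_t access_scope;  // accesses ordered by the implicit guarantee
};

static bool WriteNeedsCheck(uint32_t last_write_mask, const ToyOrdering &ordering) {
    // Mirrors 'write_is_ordered' above: a check is needed only when some prior write falls outside the scope.
    return (last_write_mask & ordering.access_scope) != last_write_mask;
}

static uint32_t ReadsNeedingCheck(uint32_t last_read_stages, const ToyOrdering &ordering) {
    // Mirrors 'unordered_reads' above: only reads outside the ordering's execution scope are WAR candidates.
    return last_read_stages & ~ordering.exec_scope;
}

int main() {
    const ToyOrdering raster_order{/*exec*/ 0x8, /*access*/ 0x30};
    assert(!WriteNeedsCheck(/*last_write*/ 0x10, raster_order));      // ordered write: no RAW/WAW reported
    assert(WriteNeedsCheck(/*last_write*/ 0x40, raster_order));       // unordered write: still checked
    assert(ReadsNeedingCheck(/*reads*/ 0x8 | 0x2, raster_order) == 0x2);  // only the unordered read remains
    return 0;
}
#endif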
2100
John Zulauf2f952d22020-02-10 11:34:51 -07002101// Asynchronous Hazards occur between subpasses with no connection through the DAG
John Zulauf3d84f1b2020-03-09 13:33:25 -06002102HazardResult ResourceAccessState::DetectAsyncHazard(SyncStageAccessIndex usage_index) const {
John Zulauf2f952d22020-02-10 11:34:51 -07002103 HazardResult hazard;
2104 auto usage = FlagBit(usage_index);
2105 if (IsRead(usage)) {
2106 if (last_write != 0) {
2107 hazard.Set(READ_RACING_WRITE, write_tag);
2108 }
2109 } else {
2110 if (last_write != 0) {
2111 hazard.Set(WRITE_RACING_WRITE, write_tag);
2112 } else if (last_read_count > 0) {
2113 hazard.Set(WRITE_RACING_READ, last_reads[0].tag);
2114 }
2115 }
2116 return hazard;
2117}
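
// [Illustrative sketch, not part of the validator -- kept inside #if 0 so it cannot affect the build.]
// DetectAsyncHazard above encodes the racing rule: with no dependency path between two subpasses, any
// write on either side races, and read vs read is the only safe pairing. A toy model (names hypothetical):
#if 0
#include <cassert>

enum class Race { None, ReadRacingWrite, WriteRacingWrite, WriteRacingRead };

static Race DetectRace(bool prior_wrote, bool prior_read, bool current_is_read) {
    if (current_is_read) return prior_wrote ? Race::ReadRacingWrite : Race::None;
    if (prior_wrote) return Race::WriteRacingWrite;
    if (prior_read) return Race::WriteRacingRead;
    return Race::None;
}

int main() {
    assert(DetectRace(true, false, true) == Race::ReadRacingWrite);
    assert(DetectRace(false, true, true) == Race::None);  // read vs read never races
    assert(DetectRace(false, true, false) == Race::WriteRacingRead);
    return 0;
}
#endif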
2118
John Zulauf36bcf6a2020-02-03 15:12:52 -07002119HazardResult ResourceAccessState::DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
2120 SyncStageAccessFlags src_access_scope) const {
John Zulauf0cb5be22020-01-23 12:18:22 -07002121 // Only supporting image layout transitions for now
2122 assert(usage_index == SyncStageAccessIndex::SYNC_IMAGE_LAYOUT_TRANSITION);
2123 HazardResult hazard;
2124 if (last_write) {
2125 // If the previous write is *not* in the 1st access scope
2126 // *AND* the current barrier is not in the dependency chain
2127 // *AND* there is no prior memory barrier for the previous write in the dependency chain
2128 // then the barrier access is unsafe (R/W after W)
John Zulauf36bcf6a2020-02-03 15:12:52 -07002129 if (((last_write & src_access_scope) == 0) && ((src_exec_scope & write_dependency_chain) == 0) && (write_barriers == 0)) {
John Zulauf0cb5be22020-01-23 12:18:22 -07002130 // TODO: Do we need a different hazard name for this?
2131 hazard.Set(WRITE_AFTER_WRITE, write_tag);
2132 }
John Zulauf355e49b2020-04-24 15:11:15 -06002133 }
2134 if (!hazard.hazard) {
2135 // Look at the reads if any
John Zulauf0cb5be22020-01-23 12:18:22 -07002136 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
John Zulauf36bcf6a2020-02-03 15:12:52 -07002137 const auto &read_access = last_reads[read_index];
2138 // If the read stage is not in the src sync scope
2139 // *AND* not execution chained with an existing sync barrier (that's the or)
2140 // then the barrier access is unsafe (R/W after R)
2141 if ((src_exec_scope & (read_access.stage | read_access.barriers)) == 0) {
2142 hazard.Set(WRITE_AFTER_READ, read_access.tag);
John Zulauf0cb5be22020-01-23 12:18:22 -07002143 break;
2144 }
2145 }
2146 }
2147 return hazard;
2148}
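
// [Illustrative sketch, not part of the validator -- kept inside #if 0 so it cannot affect the build.]
// DetectBarrierHazard above treats a layout transition as a write: it is safe only if every prior write
// is in the barrier's first access scope or already in its dependency chain, and every prior read is
// execution-ordered before the barrier. A toy model of those two tests (names and masks hypothetical):
#if 0
#include <cassert>
#include <cstdint>

struct ToyPrior {
    uint32_t write_access;   // access bit of the prior write (0 = none)
    uint32_t write_chain;    // stages already execution-chained after that write
    uint32_t read_stage;     // stage of a prior read (0 = none)
    uint32_t read_barriers;  // stages already ordered after that read
};

static bool TransitionIsHazard(const ToyPrior &p, uint32_t src_exec_scope, uint32_t src_access_scope) {
    // Prior write neither in the first access scope nor chained into the barrier: R/W after W.
    if (p.write_access && !(p.write_access & src_access_scope) && !(p.write_chain & src_exec_scope)) return true;
    // Prior read not covered by the barrier's first execution scope (directly or via chaining): W after R.
    if (p.read_stage && !((p.read_stage | p.read_barriers) & src_exec_scope)) return true;
    return false;
}

int main() {
    ToyPrior prior{/*write_access*/ 0x1, /*write_chain*/ 0, /*read_stage*/ 0, /*read_barriers*/ 0};
    assert(TransitionIsHazard(prior, /*exec*/ 0x2, /*access*/ 0x4));   // write not in scope: unsafe
    assert(!TransitionIsHazard(prior, /*exec*/ 0x2, /*access*/ 0x1));  // write covered: safe
    return 0;
}
#endif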
2149
John Zulauf5f13a792020-03-10 07:31:21 -06002150 // The logic behind resolves is the same as update: we assume that earlier hazards have been reported, and that no
2151 // transitive hazard can exist without a hazard between the earlier operations. Yes, an early hazard can mask that another
2152// exists, but if you fix *that* hazard it either fixes or unmasks the subsequent ones.
2153void ResourceAccessState::Resolve(const ResourceAccessState &other) {
2154 if (write_tag.IsBefore(other.write_tag)) {
2155 // If this is a later write, we've reported any existing hazard, and we can just overwrite as the more recent operation
2156 *this = other;
2157 } else if (!other.write_tag.IsBefore(write_tag)) {
2158 // This is the *equals* case for write operations: we merge the write barriers and the read state (but without the
2159 // dependency chaining logic or any stage expansion)
2160 write_barriers |= other.write_barriers;
2161
2162 // Merge the read states
2163 for (uint32_t other_read_index = 0; other_read_index < other.last_read_count; other_read_index++) {
2164 auto &other_read = other.last_reads[other_read_index];
2165 if (last_read_stages & other_read.stage) {
2166 // Merge in the barriers for read stages that exist in *both* this and other
2167 // TODO: This is N^2 with stages... perhaps the ReadStates should be by stage index.
2168 for (uint32_t my_read_index = 0; my_read_index < last_read_count; my_read_index++) {
2169 auto &my_read = last_reads[my_read_index];
2170 if (other_read.stage == my_read.stage) {
2171 if (my_read.tag.IsBefore(other_read.tag)) {
2172 my_read.tag = other_read.tag;
2173 }
2174 my_read.barriers |= other_read.barriers;
2175 break;
2176 }
2177 }
2178 } else {
2179 // The other read stage doesn't exist in this, so add it.
2180 last_reads[last_read_count] = other_read;
2181 last_read_count++;
2182 last_read_stages |= other_read.stage;
2183 }
2184 }
2185 } // the else clause would be that the other write is before this write... in which case we supersede the other state and ignore
2186 // it.
2187}
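
// [Illustrative sketch, not part of the validator -- kept inside #if 0 so it cannot affect the build.]
// Resolve above merges two views of the same resource: the state with the newer write wins outright, and
// when both sides saw the same write their barriers and per-stage reads are unioned. A toy model of that
// tag-ordered merge (the types are hypothetical and far smaller than the real access state):
#if 0
#include <cassert>
#include <cstdint>

struct ToyState {
    uint64_t write_tag;       // larger tag == more recent write (0 means "no write seen")
    uint32_t write_barriers;  // destination access scopes already applied to that write
};

static ToyState Merge(const ToyState &a, const ToyState &b) {
    if (a.write_tag < b.write_tag) return b;  // newer write supersedes the whole state
    if (b.write_tag < a.write_tag) return a;
    ToyState merged = a;
    merged.write_barriers |= b.write_barriers;  // same write seen on both paths: accumulate what each guarded
    return merged;
}

int main() {
    ToyState older{10, 0x1}, newer{20, 0x4};
    assert(Merge(older, newer).write_tag == 20);          // the more recent write wins
    ToyState same_a{20, 0x1}, same_b{20, 0x2};
    assert(Merge(same_a, same_b).write_barriers == 0x3);  // equal tags: barriers are unioned
    return 0;
}
#endif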
2188
John Zulauf9cb530d2019-09-30 14:14:10 -06002189void ResourceAccessState::Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag) {
2190 // Move this logic into the ResourceStateTracker as methods thereof (or we'll repeat it for every flavor of resource...)
2191 const auto usage_bit = FlagBit(usage_index);
2192 if (IsRead(usage_index)) {
2193 // Multiple outstanding reads may be of interest and do dependency chains independently
2194 // However, for purposes of barrier tracking, only one read per pipeline stage matters
2195 const auto usage_stage = PipelineStageBit(usage_index);
2196 if (usage_stage & last_read_stages) {
2197 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2198 ReadState &access = last_reads[read_index];
2199 if (access.stage == usage_stage) {
2200 access.barriers = 0;
2201 access.tag = tag;
2202 break;
2203 }
2204 }
2205 } else {
2206 // We don't have this stage in the list yet...
2207 assert(last_read_count < last_reads.size());
2208 ReadState &access = last_reads[last_read_count++];
2209 access.stage = usage_stage;
2210 access.barriers = 0;
2211 access.tag = tag;
2212 last_read_stages |= usage_stage;
2213 }
2214 } else {
2215 // Assume write
2216 // TODO determine what to do with READ-WRITE operations if any
2217 // Clobber last read and both sets of barriers... because all we have is DANGER, DANGER, WILL ROBINSON!!!
2218 // if the last_reads/last_write were unsafe, we've reported them,
2219 // in either case the prior access is irrelevant, we can overwrite them as *this* write is now after them
2220 last_read_count = 0;
2221 last_read_stages = 0;
2222
2223 write_barriers = 0;
2224 write_dependency_chain = 0;
2225 write_tag = tag;
2226 last_write = usage_bit;
2227 }
2228}
John Zulauf5f13a792020-03-10 07:31:21 -06002229
John Zulauf9cb530d2019-09-30 14:14:10 -06002230void ResourceAccessState::ApplyExecutionBarrier(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask) {
2231 // Execution Barriers only protect read operations
2232 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2233 ReadState &access = last_reads[read_index];
2234 // The | implements the "dependency chain" logic for this access, as the barriers field stores the second sync scope
2235 if (srcStageMask & (access.stage | access.barriers)) {
2236 access.barriers |= dstStageMask;
2237 }
2238 }
2239 if (write_dependency_chain & srcStageMask) write_dependency_chain |= dstStageMask;
2240}
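
// [Illustrative sketch, not part of the validator -- kept inside #if 0 so it cannot affect the build.]
// ApplyExecutionBarrier above implements dependency chaining for reads: a later barrier protects a read
// either because its srcStageMask covers the read's stage, or because it covers a stage that an earlier
// barrier already ordered after the read (the access.stage | access.barriers test). A toy model:
#if 0
#include <cassert>
#include <cstdint>

struct ToyRead {
    uint32_t stage;     // pipeline stage that performed the read
    uint32_t barriers;  // second synchronization scopes accumulated so far
};

static void ApplyExecBarrier(ToyRead &read, uint32_t src_mask, uint32_t dst_mask) {
    // The read is protected if the barrier's first scope covers the read's stage or a chained stage.
    if (src_mask & (read.stage | read.barriers)) read.barriers |= dst_mask;
}

int main() {
    ToyRead read{/*stage A*/ 0x1, /*barriers*/ 0};
    ApplyExecBarrier(read, /*src = A*/ 0x1, /*dst = B*/ 0x4);   // directly covers the read
    ApplyExecBarrier(read, /*src = B*/ 0x4, /*dst = C*/ 0x8);   // chains through the first barrier
    assert(read.barriers == (0x4 | 0x8));
    // A write at stage C is now ordered after the read, so it is not a write-after-read hazard.
    return 0;
}
#endif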
2241
John Zulauf36bcf6a2020-02-03 15:12:52 -07002242void ResourceAccessState::ApplyMemoryAccessBarrier(VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
2243 VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags dst_access_scope) {
John Zulauf9cb530d2019-09-30 14:14:10 -06002244 // Assuming we've applied the execution side of this barrier, we update just the write
2245 // The || implements the "dependency chain" logic for this barrier
John Zulauf36bcf6a2020-02-03 15:12:52 -07002246 if ((src_access_scope & last_write) || (write_dependency_chain & src_exec_scope)) {
2247 write_barriers |= dst_access_scope;
2248 write_dependency_chain |= dst_exec_scope;
John Zulauf9cb530d2019-09-30 14:14:10 -06002249 }
2250}
2251
John Zulaufd1f85d42020-04-15 12:23:15 -06002252void SyncValidator::ResetCommandBufferCallback(VkCommandBuffer command_buffer) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002253 auto *access_context = GetAccessContextNoInsert(command_buffer);
2254 if (access_context) {
2255 access_context->Reset();
John Zulauf9cb530d2019-09-30 14:14:10 -06002256 }
2257}
2258
John Zulaufd1f85d42020-04-15 12:23:15 -06002259void SyncValidator::FreeCommandBufferCallback(VkCommandBuffer command_buffer) {
2260 auto access_found = cb_access_state.find(command_buffer);
2261 if (access_found != cb_access_state.end()) {
2262 access_found->second->Reset();
2263 cb_access_state.erase(access_found);
2264 }
2265}
2266
John Zulauf540266b2020-04-06 18:54:53 -06002267void SyncValidator::ApplyGlobalBarriers(AccessContext *context, VkPipelineStageFlags srcStageMask,
John Zulauf36bcf6a2020-02-03 15:12:52 -07002268 VkPipelineStageFlags dstStageMask, SyncStageAccessFlags src_access_scope,
2269 SyncStageAccessFlags dst_access_scope, uint32_t memoryBarrierCount,
John Zulauf9cb530d2019-09-30 14:14:10 -06002270 const VkMemoryBarrier *pMemoryBarriers) {
2271 // TODO: Implement this better (maybe some delayed/on-demand integration).
John Zulauf36bcf6a2020-02-03 15:12:52 -07002272 ApplyGlobalBarrierFunctor barriers_functor(srcStageMask, dstStageMask, src_access_scope, dst_access_scope, memoryBarrierCount,
John Zulauf9cb530d2019-09-30 14:14:10 -06002273 pMemoryBarriers);
John Zulauf540266b2020-04-06 18:54:53 -06002274 context->ApplyGlobalBarriers(barriers_functor);
John Zulauf9cb530d2019-09-30 14:14:10 -06002275}
2276
John Zulauf540266b2020-04-06 18:54:53 -06002277void SyncValidator::ApplyBufferBarriers(AccessContext *context, VkPipelineStageFlags src_exec_scope,
John Zulauf36bcf6a2020-02-03 15:12:52 -07002278 SyncStageAccessFlags src_stage_accesses, VkPipelineStageFlags dst_exec_scope,
2279 SyncStageAccessFlags dst_stage_accesses, uint32_t barrier_count,
John Zulauf9cb530d2019-09-30 14:14:10 -06002280 const VkBufferMemoryBarrier *barriers) {
John Zulauf9cb530d2019-09-30 14:14:10 -06002281 for (uint32_t index = 0; index < barrier_count; index++) {
locke-lunarg3c038002020-04-30 23:08:08 -06002282 auto barrier = barriers[index];
John Zulauf9cb530d2019-09-30 14:14:10 -06002283 const auto *buffer = Get<BUFFER_STATE>(barrier.buffer);
2284 if (!buffer) continue;
locke-lunarg3c038002020-04-30 23:08:08 -06002285 barrier.size = GetRealWholeSize(barrier.offset, barrier.size, buffer->createInfo.size);
John Zulauf16adfc92020-04-08 10:28:33 -06002286 ResourceAccessRange range = MakeRange(barrier);
John Zulauf540266b2020-04-06 18:54:53 -06002287 const auto src_access_scope = AccessScope(src_stage_accesses, barrier.srcAccessMask);
2288 const auto dst_access_scope = AccessScope(dst_stage_accesses, barrier.dstAccessMask);
2289 const ApplyMemoryAccessBarrierFunctor update_action(src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope);
2290 context->UpdateMemoryAccess(*buffer, range, update_action);
John Zulauf9cb530d2019-09-30 14:14:10 -06002291 }
2292}
2293
John Zulauf540266b2020-04-06 18:54:53 -06002294void SyncValidator::ApplyImageBarriers(AccessContext *context, VkPipelineStageFlags src_exec_scope,
2295 SyncStageAccessFlags src_stage_accesses, VkPipelineStageFlags dst_exec_scope,
2296 SyncStageAccessFlags dst_stage_accesses, uint32_t barrier_count,
John Zulauf355e49b2020-04-24 15:11:15 -06002297 const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag) {
John Zulauf5c5e88d2019-12-26 11:22:02 -07002298 for (uint32_t index = 0; index < barrier_count; index++) {
2299 const auto &barrier = barriers[index];
2300 const auto *image = Get<IMAGE_STATE>(barrier.image);
2301 if (!image) continue;
John Zulauf540266b2020-04-06 18:54:53 -06002302 auto subresource_range = NormalizeSubresourceRange(image->createInfo, barrier.subresourceRange);
John Zulauf355e49b2020-04-24 15:11:15 -06002303 bool layout_transition = barrier.oldLayout != barrier.newLayout;
2304 const auto src_access_scope = AccessScope(src_stage_accesses, barrier.srcAccessMask);
2305 const auto dst_access_scope = AccessScope(dst_stage_accesses, barrier.dstAccessMask);
2306 context->ApplyImageBarrier(*image, src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope, subresource_range,
2307 layout_transition, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002308 }
John Zulauf9cb530d2019-09-30 14:14:10 -06002309}
2310
2311bool SyncValidator::PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2312 uint32_t regionCount, const VkBufferCopy *pRegions) const {
2313 bool skip = false;
John Zulauf3d84f1b2020-03-09 13:33:25 -06002314 const auto *cb_context = GetAccessContext(commandBuffer);
2315 assert(cb_context);
2316 if (!cb_context) return skip;
2317 const auto *context = cb_context->GetCurrentAccessContext();
John Zulauf9cb530d2019-09-30 14:14:10 -06002318
John Zulauf3d84f1b2020-03-09 13:33:25 -06002319 // If we have no previous accesses, we have no hazards
John Zulauf3d84f1b2020-03-09 13:33:25 -06002320 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002321 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002322
2323 for (uint32_t region = 0; region < regionCount; region++) {
2324 const auto &copy_region = pRegions[region];
John Zulauf16adfc92020-04-08 10:28:33 -06002325 if (src_buffer) {
locke-lunargff255f92020-05-13 18:53:52 -06002326 ResourceAccessRange src_range = MakeRange(
2327 copy_region.srcOffset, GetRealWholeSize(copy_region.srcOffset, copy_region.size, src_buffer->createInfo.size));
John Zulauf16adfc92020-04-08 10:28:33 -06002328 auto hazard = context->DetectHazard(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002329 if (hazard.hazard) {
2330 // TODO -- add tag information to log msg when useful.
locke-lunarga0003652020-03-10 11:38:51 -06002331 skip |= LogError(srcBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002332 "vkCmdCopyBuffer: Hazard %s for srcBuffer %s, region %" PRIu32 ". Prior access %s.",
2333 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcBuffer).c_str(), region,
2334 string_UsageTag(hazard.tag).c_str());
John Zulauf9cb530d2019-09-30 14:14:10 -06002335 }
John Zulauf9cb530d2019-09-30 14:14:10 -06002336 }
John Zulauf16adfc92020-04-08 10:28:33 -06002337 if (dst_buffer && !skip) {
locke-lunargff255f92020-05-13 18:53:52 -06002338 ResourceAccessRange dst_range = MakeRange(
2339 copy_region.dstOffset, GetRealWholeSize(copy_region.dstOffset, copy_region.size, dst_buffer->createInfo.size));
John Zulauf355e49b2020-04-24 15:11:15 -06002340 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002341 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002342 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002343 "vkCmdCopyBuffer: Hazard %s for dstBuffer %s, region %" PRIu32 ". Prior access %s.",
2344 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str(), region,
2345 string_UsageTag(hazard.tag).c_str());
John Zulauf3d84f1b2020-03-09 13:33:25 -06002346 }
2347 }
2348 if (skip) break;
John Zulauf9cb530d2019-09-30 14:14:10 -06002349 }
2350 return skip;
2351}
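
// [Illustrative application-side sketch, not part of the validator -- kept inside #if 0 so it cannot
// affect the build.] It assumes the command buffer is recording and the buffers/sizes already exist:
// two back-to-back copies into the same region of 'dst' are reported by the validation above as a
// WRITE_AFTER_WRITE hazard unless a buffer memory barrier orders the transfer writes in between.
#if 0
#include <vulkan/vulkan.h>

static void RecordCopies(VkCommandBuffer cb, VkBuffer src_a, VkBuffer src_b, VkBuffer dst, VkDeviceSize size) {
    VkBufferCopy region{/*srcOffset*/ 0, /*dstOffset*/ 0, size};
    vkCmdCopyBuffer(cb, src_a, dst, 1, &region);  // first write to dst[0, size)

    // Without this barrier the second copy below is flagged as a WRITE_AFTER_WRITE hazard on dst.
    VkBufferMemoryBarrier barrier{};
    barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
    barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.buffer = dst;
    barrier.offset = 0;
    barrier.size = size;
    vkCmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &barrier,
                         0, nullptr);

    vkCmdCopyBuffer(cb, src_b, dst, 1, &region);  // second write, now ordered after the first
}
#endif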
2352
2353void SyncValidator::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2354 uint32_t regionCount, const VkBufferCopy *pRegions) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002355 auto *cb_context = GetAccessContext(commandBuffer);
2356 assert(cb_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002357 const auto tag = cb_context->NextCommandTag(CMD_COPYBUFFER);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002358 auto *context = cb_context->GetCurrentAccessContext();
2359
John Zulauf9cb530d2019-09-30 14:14:10 -06002360 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
John Zulauf9cb530d2019-09-30 14:14:10 -06002361 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
John Zulauf9cb530d2019-09-30 14:14:10 -06002362
2363 for (uint32_t region = 0; region < regionCount; region++) {
2364 const auto &copy_region = pRegions[region];
John Zulauf16adfc92020-04-08 10:28:33 -06002365 if (src_buffer) {
locke-lunargff255f92020-05-13 18:53:52 -06002366 ResourceAccessRange src_range = MakeRange(
2367 copy_region.srcOffset, GetRealWholeSize(copy_region.srcOffset, copy_region.size, src_buffer->createInfo.size));
John Zulauf16adfc92020-04-08 10:28:33 -06002368 context->UpdateAccessState(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002369 }
John Zulauf16adfc92020-04-08 10:28:33 -06002370 if (dst_buffer) {
locke-lunargff255f92020-05-13 18:53:52 -06002371 ResourceAccessRange dst_range = MakeRange(
2372 copy_region.dstOffset, GetRealWholeSize(copy_region.dstOffset, copy_region.size, dst_buffer->createInfo.size));
John Zulauf16adfc92020-04-08 10:28:33 -06002373 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range, tag);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002374 }
2375 }
2376}
2377
2378bool SyncValidator::PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2379 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2380 const VkImageCopy *pRegions) const {
2381 bool skip = false;
John Zulauf3d84f1b2020-03-09 13:33:25 -06002382 const auto *cb_access_context = GetAccessContext(commandBuffer);
2383 assert(cb_access_context);
2384 if (!cb_access_context) return skip;
John Zulauf5c5e88d2019-12-26 11:22:02 -07002385
John Zulauf3d84f1b2020-03-09 13:33:25 -06002386 const auto *context = cb_access_context->GetCurrentAccessContext();
2387 assert(context);
2388 if (!context) return skip;
2389
2390 const auto *src_image = Get<IMAGE_STATE>(srcImage);
2391 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002392 for (uint32_t region = 0; region < regionCount; region++) {
2393 const auto &copy_region = pRegions[region];
2394 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002395 auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.srcSubresource,
John Zulauf3d84f1b2020-03-09 13:33:25 -06002396 copy_region.srcOffset, copy_region.extent);
2397 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002398 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002399 "vkCmdCopyImage: Hazard %s for srcImage %s, region %" PRIu32 ". Prior access %s.",
2400 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
2401 string_UsageTag(hazard.tag).c_str());
John Zulauf5c5e88d2019-12-26 11:22:02 -07002402 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06002403 }
2404
2405 if (dst_image) {
locke-lunarg1df1f882020-03-02 16:42:08 -07002406 VkExtent3D dst_copy_extent =
2407 GetAdjustedDestImageExtent(src_image->createInfo.format, dst_image->createInfo.format, copy_region.extent);
John Zulauf540266b2020-04-06 18:54:53 -06002408 auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.dstSubresource,
locke-lunarg1df1f882020-03-02 16:42:08 -07002409 copy_region.dstOffset, dst_copy_extent);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002410 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002411 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002412 "vkCmdCopyImage: Hazard %s for dstImage %s, region %" PRIu32 ". Prior access %s.",
2413 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
2414 string_UsageTag(hazard.tag).c_str());
John Zulauf5c5e88d2019-12-26 11:22:02 -07002415 }
locke-lunarg1dbbb9e2020-02-28 22:43:53 -07002416 if (skip) break;
John Zulauf5c5e88d2019-12-26 11:22:02 -07002417 }
2418 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06002419
John Zulauf5c5e88d2019-12-26 11:22:02 -07002420 return skip;
2421}
2422
2423void SyncValidator::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2424 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2425 const VkImageCopy *pRegions) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002426 auto *cb_access_context = GetAccessContext(commandBuffer);
2427 assert(cb_access_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002428 const auto tag = cb_access_context->NextCommandTag(CMD_COPYIMAGE);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002429 auto *context = cb_access_context->GetCurrentAccessContext();
2430 assert(context);
2431
John Zulauf5c5e88d2019-12-26 11:22:02 -07002432 auto *src_image = Get<IMAGE_STATE>(srcImage);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002433 auto *dst_image = Get<IMAGE_STATE>(dstImage);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002434
2435 for (uint32_t region = 0; region < regionCount; region++) {
2436 const auto &copy_region = pRegions[region];
John Zulauf3d84f1b2020-03-09 13:33:25 -06002437 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002438 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.srcSubresource, copy_region.srcOffset,
2439 copy_region.extent, tag);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002440 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06002441 if (dst_image) {
locke-lunarg1df1f882020-03-02 16:42:08 -07002442 VkExtent3D dst_copy_extent =
2443 GetAdjustedDestImageExtent(src_image->createInfo.format, dst_image->createInfo.format, copy_region.extent);
John Zulauf540266b2020-04-06 18:54:53 -06002444 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.dstSubresource, copy_region.dstOffset,
2445 dst_copy_extent, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002446 }
2447 }
2448}
2449
2450bool SyncValidator::PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2451 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
2452 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2453 uint32_t bufferMemoryBarrierCount,
2454 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2455 uint32_t imageMemoryBarrierCount,
2456 const VkImageMemoryBarrier *pImageMemoryBarriers) const {
2457 bool skip = false;
John Zulauf3d84f1b2020-03-09 13:33:25 -06002458 const auto *cb_access_context = GetAccessContext(commandBuffer);
2459 assert(cb_access_context);
2460 if (!cb_access_context) return skip;
John Zulauf0cb5be22020-01-23 12:18:22 -07002461
John Zulauf3d84f1b2020-03-09 13:33:25 -06002462 const auto *context = cb_access_context->GetCurrentAccessContext();
2463 assert(context);
2464 if (!context) return skip;
John Zulauf0cb5be22020-01-23 12:18:22 -07002465
John Zulauf3d84f1b2020-03-09 13:33:25 -06002466 const auto src_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), srcStageMask);
John Zulauf36bcf6a2020-02-03 15:12:52 -07002467 const auto src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
2468 auto src_stage_accesses = AccessScopeByStage(src_stage_mask);
John Zulauf0cb5be22020-01-23 12:18:22 -07002469 // Validate Image Layout transitions
2470 for (uint32_t index = 0; index < imageMemoryBarrierCount; index++) {
2471 const auto &barrier = pImageMemoryBarriers[index];
2472 if (barrier.newLayout == barrier.oldLayout) continue; // Only interested in layout transitions at this point.
2473 const auto *image_state = Get<IMAGE_STATE>(barrier.image);
2474 if (!image_state) continue;
John Zulauf16adfc92020-04-08 10:28:33 -06002475 const auto hazard = context->DetectImageBarrierHazard(*image_state, src_exec_scope, src_stage_accesses, barrier);
John Zulauf0cb5be22020-01-23 12:18:22 -07002476 if (hazard.hazard) {
John Zulauf7635de32020-05-29 17:14:15 -06002477 // PHASE1 TODO -- add tag information to log msg when useful.
locke-lunarga0003652020-03-10 11:38:51 -06002478 skip |= LogError(barrier.image, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002479 "vkCmdPipelineBarrier: Hazard %s for image barrier %" PRIu32 " %s. Prior access %s.",
2480 string_SyncHazard(hazard.hazard), index, report_data->FormatHandle(barrier.image).c_str(),
2481 string_UsageTag(hazard.tag).c_str());
John Zulauf0cb5be22020-01-23 12:18:22 -07002482 }
2483 }
John Zulauf9cb530d2019-09-30 14:14:10 -06002484
2485 return skip;
2486}
2487
2488void SyncValidator::PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2489 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
2490 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2491 uint32_t bufferMemoryBarrierCount,
2492 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2493 uint32_t imageMemoryBarrierCount,
2494 const VkImageMemoryBarrier *pImageMemoryBarriers) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002495 auto *cb_access_context = GetAccessContext(commandBuffer);
2496 assert(cb_access_context);
2497 if (!cb_access_context) return;
John Zulauf2b151bf2020-04-24 15:37:44 -06002498 const auto tag = cb_access_context->NextCommandTag(CMD_PIPELINEBARRIER);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002499 auto access_context = cb_access_context->GetCurrentAccessContext();
2500 assert(access_context);
2501 if (!access_context) return;
John Zulauf9cb530d2019-09-30 14:14:10 -06002502
John Zulauf3d84f1b2020-03-09 13:33:25 -06002503 const auto src_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), srcStageMask);
John Zulauf36bcf6a2020-02-03 15:12:52 -07002504 auto src_stage_accesses = AccessScopeByStage(src_stage_mask);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002505 const auto dst_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), dstStageMask);
John Zulauf36bcf6a2020-02-03 15:12:52 -07002506 auto dst_stage_accesses = AccessScopeByStage(dst_stage_mask);
2507 const auto src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
2508 const auto dst_exec_scope = WithLaterPipelineStages(dst_stage_mask);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002509 ApplyBufferBarriers(access_context, src_exec_scope, src_stage_accesses, dst_exec_scope, dst_stage_accesses,
2510 bufferMemoryBarrierCount, pBufferMemoryBarriers);
John Zulauf540266b2020-04-06 18:54:53 -06002511 ApplyImageBarriers(access_context, src_exec_scope, src_stage_accesses, dst_exec_scope, dst_stage_accesses,
John Zulauf355e49b2020-04-24 15:11:15 -06002512 imageMemoryBarrierCount, pImageMemoryBarriers, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002513
2514 // Apply these last in case their operation is a superset of the other two and would clean them up...
John Zulauf3d84f1b2020-03-09 13:33:25 -06002515 ApplyGlobalBarriers(access_context, src_exec_scope, dst_exec_scope, src_stage_accesses, dst_stage_accesses, memoryBarrierCount,
John Zulauf0cb5be22020-01-23 12:18:22 -07002516 pMemoryBarriers);
John Zulauf9cb530d2019-09-30 14:14:10 -06002517}
2518
2519void SyncValidator::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
2520 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
2521 // The state tracker sets up the device state
2522 StateTracker::PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);
2523
John Zulauf5f13a792020-03-10 07:31:21 -06002524 // Add the callback hooks for the functions that are either broadly or deeply used and that the ValidationStateTracker
2525 // refactor would be messier without.
John Zulauf9cb530d2019-09-30 14:14:10 -06002526 // TODO: Find a good way to do this hooklessly.
2527 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
2528 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, LayerObjectTypeSyncValidation);
2529 SyncValidator *sync_device_state = static_cast<SyncValidator *>(validation_data);
2530
John Zulaufd1f85d42020-04-15 12:23:15 -06002531 sync_device_state->SetCommandBufferResetCallback([sync_device_state](VkCommandBuffer command_buffer) -> void {
2532 sync_device_state->ResetCommandBufferCallback(command_buffer);
2533 });
2534 sync_device_state->SetCommandBufferFreeCallback([sync_device_state](VkCommandBuffer command_buffer) -> void {
2535 sync_device_state->FreeCommandBufferCallback(command_buffer);
2536 });
John Zulauf9cb530d2019-09-30 14:14:10 -06002537}
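
// [Illustrative sketch, not part of the validator -- kept inside #if 0 so it cannot affect the build.]
// The registration above follows a simple std::function hook pattern: the state tracker only knows it
// holds a callable, and this layer binds its own handler at device creation. A toy model (names hypothetical):
#if 0
#include <cassert>
#include <functional>

struct ToyTracker {
    std::function<void(int)> on_reset;  // set by the interested layer, invoked by the tracker
    void Reset(int command_buffer) {
        if (on_reset) on_reset(command_buffer);
    }
};

int main() {
    int resets_seen = 0;
    ToyTracker tracker;
    tracker.on_reset = [&resets_seen](int /*command_buffer*/) { ++resets_seen; };
    tracker.Reset(42);
    assert(resets_seen == 1);
    return 0;
}
#endif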
John Zulauf3d84f1b2020-03-09 13:33:25 -06002538
John Zulauf355e49b2020-04-24 15:11:15 -06002539bool SyncValidator::ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2540 const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const char *func_name) const {
2541 bool skip = false;
2542 const auto rp_state = Get<RENDER_PASS_STATE>(pRenderPassBegin->renderPass);
2543 auto cb_context = GetAccessContext(commandBuffer);
2544
2545 if (rp_state && cb_context) {
2546 skip |= cb_context->ValidateBeginRenderPass(*rp_state, pRenderPassBegin, pSubpassBeginInfo, func_name);
2547 }
2548
2549 return skip;
2550}
2551
2552bool SyncValidator::PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2553 VkSubpassContents contents) const {
2554 bool skip = StateTracker::PreCallValidateCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
2555 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
2556 subpass_begin_info.contents = contents;
2557 skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, &subpass_begin_info, "vkCmdBeginRenderPass");
2558 return skip;
2559}
2560
2561bool SyncValidator::PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2562 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const {
2563 bool skip = StateTracker::PreCallValidateCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
2564 skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, "vkCmdBeginRenderPass2");
2565 return skip;
2566}
2567
2568bool SyncValidator::PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
2569 const VkRenderPassBeginInfo *pRenderPassBegin,
2570 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const {
2571 bool skip = StateTracker::PreCallValidateCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
2572 skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, "vkCmdBeginRenderPass2KHR");
2573 return skip;
2574}
2575
John Zulauf3d84f1b2020-03-09 13:33:25 -06002576void SyncValidator::PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
2577 VkResult result) {
2578 // The state tracker sets up the command buffer state
2579 StateTracker::PostCallRecordBeginCommandBuffer(commandBuffer, pBeginInfo, result);
2580
2581 // Create/initialize the structure that tracks accesses at the command buffer scope.
2582 auto cb_access_context = GetAccessContext(commandBuffer);
2583 assert(cb_access_context);
2584 cb_access_context->Reset();
2585}
2586
2587void SyncValidator::RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
John Zulauf355e49b2020-04-24 15:11:15 -06002588 const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE command) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002589 auto cb_context = GetAccessContext(commandBuffer);
John Zulauf355e49b2020-04-24 15:11:15 -06002590 if (cb_context) {
2591 cb_context->RecordBeginRenderPass(cb_context->NextCommandTag(command));
John Zulauf3d84f1b2020-03-09 13:33:25 -06002592 }
2593}
2594
2595void SyncValidator::PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2596 VkSubpassContents contents) {
2597 StateTracker::PostCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
2598 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
2599 subpass_begin_info.contents = contents;
John Zulauf355e49b2020-04-24 15:11:15 -06002600 RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, &subpass_begin_info, CMD_BEGINRENDERPASS);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002601}
2602
2603void SyncValidator::PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2604 const VkSubpassBeginInfo *pSubpassBeginInfo) {
2605 StateTracker::PostCallRecordCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06002606 RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, CMD_BEGINRENDERPASS2);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002607}
2608
2609void SyncValidator::PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
2610 const VkRenderPassBeginInfo *pRenderPassBegin,
2611 const VkSubpassBeginInfo *pSubpassBeginInfo) {
2612 StateTracker::PostCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06002613 RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, CMD_BEGINRENDERPASS2);
2614}
2615
2616bool SyncValidator::ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
2617 const VkSubpassEndInfoKHR *pSubpassEndInfo, const char *func_name) const {
2618 bool skip = false;
2619
2620 auto cb_context = GetAccessContext(commandBuffer);
2621 assert(cb_context);
2622 auto cb_state = cb_context->GetCommandBufferState();
2623 if (!cb_state) return skip;
2624
2625 auto rp_state = cb_state->activeRenderPass;
2626 if (!rp_state) return skip;
2627
2628 skip |= cb_context->ValidateNextSubpass(func_name);
2629
2630 return skip;
2631}
2632
2633bool SyncValidator::PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const {
2634 bool skip = StateTracker::PreCallValidateCmdNextSubpass(commandBuffer, contents);
2635 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
2636 subpass_begin_info.contents = contents;
2637 skip |= ValidateCmdNextSubpass(commandBuffer, &subpass_begin_info, nullptr, "vkCmdNextSubpass");
2638 return skip;
2639}
2640
2641bool SyncValidator::PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
2642 const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
2643 bool skip = StateTracker::PreCallValidateCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
2644 skip |= ValidateCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, "vkCmdNextSubpass2KHR");
2645 return skip;
2646}
2647
2648bool SyncValidator::PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
2649 const VkSubpassEndInfo *pSubpassEndInfo) const {
2650 bool skip = StateTracker::PreCallValidateCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
2651 skip |= ValidateCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, "vkCmdNextSubpass2");
2652 return skip;
John Zulauf3d84f1b2020-03-09 13:33:25 -06002653}
2654
2655void SyncValidator::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
John Zulauf355e49b2020-04-24 15:11:15 -06002656 const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002657 auto cb_context = GetAccessContext(commandBuffer);
2658 assert(cb_context);
2659 auto cb_state = cb_context->GetCommandBufferState();
2660 if (!cb_state) return;
2661
2662 auto rp_state = cb_state->activeRenderPass;
2663 if (!rp_state) return;
2664
John Zulauf355e49b2020-04-24 15:11:15 -06002665 cb_context->RecordNextSubpass(*rp_state, cb_context->NextCommandTag(command));
John Zulauf3d84f1b2020-03-09 13:33:25 -06002666}
2667
2668void SyncValidator::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
2669 StateTracker::PostCallRecordCmdNextSubpass(commandBuffer, contents);
2670 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
2671 subpass_begin_info.contents = contents;
John Zulauf355e49b2020-04-24 15:11:15 -06002672 RecordCmdNextSubpass(commandBuffer, &subpass_begin_info, nullptr, CMD_NEXTSUBPASS);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002673}
2674
2675void SyncValidator::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
2676 const VkSubpassEndInfo *pSubpassEndInfo) {
2677 StateTracker::PostCallRecordCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06002678 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, CMD_NEXTSUBPASS2);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002679}
2680
2681void SyncValidator::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
2682 const VkSubpassEndInfo *pSubpassEndInfo) {
2683 StateTracker::PostCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06002684 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, CMD_NEXTSUBPASS2);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002685}
2686
John Zulauf355e49b2020-04-24 15:11:15 -06002687bool SyncValidator::ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo,
2688 const char *func_name) const {
2689 bool skip = false;
2690
2691 auto cb_context = GetAccessContext(commandBuffer);
2692 assert(cb_context);
2693 auto cb_state = cb_context->GetCommandBufferState();
2694 if (!cb_state) return skip;
2695
2696 auto rp_state = cb_state->activeRenderPass;
2697 if (!rp_state) return skip;
2698
2699 skip |= cb_context->ValidateEndRenderpass(func_name);
2700 return skip;
2701}
2702
2703bool SyncValidator::PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const {
2704 bool skip = StateTracker::PreCallValidateCmdEndRenderPass(commandBuffer);
2705 skip |= ValidateCmdEndRenderPass(commandBuffer, nullptr, "vkCmdEndRenderPass");
2706 return skip;
2707}
2708
2709bool SyncValidator::PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer,
2710 const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
2711 bool skip = StateTracker::PreCallValidateCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
2712 skip |= ValidateCmdEndRenderPass(commandBuffer, pSubpassEndInfo, "vkCmdEndRenderPass2");
2713 return skip;
2714}
2715
2716bool SyncValidator::PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
2717 const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
2718 bool skip = StateTracker::PreCallValidateCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
2719 skip |= ValidateCmdEndRenderPass(commandBuffer, pSubpassEndInfo, "vkCmdEndRenderPass2KHR");
2720 return skip;
2721}
2722
2723void SyncValidator::RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo,
2724 CMD_TYPE command) {
John Zulaufe5da6e52020-03-18 15:32:18 -06002725 // Resolve all of the subpass contexts to the command buffer context
2726 auto cb_context = GetAccessContext(commandBuffer);
2727 assert(cb_context);
2728 auto cb_state = cb_context->GetCommandBufferState();
2729 if (!cb_state) return;
2730
locke-lunargaecf2152020-05-12 17:15:41 -06002731 const auto *rp_state = cb_state->activeRenderPass.get();
John Zulaufe5da6e52020-03-18 15:32:18 -06002732 if (!rp_state) return;
2733
John Zulauf355e49b2020-04-24 15:11:15 -06002734 cb_context->RecordEndRenderPass(*rp_state, cb_context->NextCommandTag(command));
John Zulaufe5da6e52020-03-18 15:32:18 -06002735}
John Zulauf3d84f1b2020-03-09 13:33:25 -06002736
2737void SyncValidator::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
John Zulauf355e49b2020-04-24 15:11:15 -06002738 RecordCmdEndRenderPass(commandBuffer, nullptr, CMD_ENDRENDERPASS);
John Zulauf5a1a5382020-06-22 17:23:25 -06002739 StateTracker::PostCallRecordCmdEndRenderPass(commandBuffer);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002740}
2741
2742void SyncValidator::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) {
John Zulauf355e49b2020-04-24 15:11:15 -06002743 RecordCmdEndRenderPass(commandBuffer, pSubpassEndInfo, CMD_ENDRENDERPASS2);
John Zulauf5a1a5382020-06-22 17:23:25 -06002744 StateTracker::PostCallRecordCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002745}
2746
2747void SyncValidator::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) {
John Zulauf355e49b2020-04-24 15:11:15 -06002748 RecordCmdEndRenderPass(commandBuffer, pSubpassEndInfo, CMD_ENDRENDERPASS2);
John Zulauf5a1a5382020-06-22 17:23:25 -06002749 StateTracker::PostCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002750}
locke-lunarga19c71d2020-03-02 18:17:04 -07002751
2752bool SyncValidator::PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
2753 VkImageLayout dstImageLayout, uint32_t regionCount,
2754 const VkBufferImageCopy *pRegions) const {
2755 bool skip = false;
2756 const auto *cb_access_context = GetAccessContext(commandBuffer);
2757 assert(cb_access_context);
2758 if (!cb_access_context) return skip;
2759
2760 const auto *context = cb_access_context->GetCurrentAccessContext();
2761 assert(context);
2762 if (!context) return skip;
2763
2764 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
locke-lunarga19c71d2020-03-02 18:17:04 -07002765 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
2766
2767 for (uint32_t region = 0; region < regionCount; region++) {
2768 const auto &copy_region = pRegions[region];
John Zulauf16adfc92020-04-08 10:28:33 -06002769 if (src_buffer) {
John Zulauf355e49b2020-04-24 15:11:15 -06002770 ResourceAccessRange src_range =
2771 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, dst_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06002772 auto hazard = context->DetectHazard(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range);
locke-lunarga19c71d2020-03-02 18:17:04 -07002773 if (hazard.hazard) {
John Zulauf7635de32020-05-29 17:14:15 -06002774 // PHASE1 TODO -- add tag information to log msg when useful.
locke-lunarga0003652020-03-10 11:38:51 -06002775 skip |= LogError(srcBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002776 "vkCmdCopyBufferToImage: Hazard %s for srcBuffer %s, region %" PRIu32 ". Prior access %s.",
2777 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcBuffer).c_str(), region,
2778 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002779 }
2780 }
2781 if (dst_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002782 auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.imageSubresource,
locke-lunarga19c71d2020-03-02 18:17:04 -07002783 copy_region.imageOffset, copy_region.imageExtent);
2784 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002785 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002786 "vkCmdCopyBufferToImage: Hazard %s for dstImage %s, region %" PRIu32 ". Prior access %s.",
2787 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
2788 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002789 }
2790 if (skip) break;
2791 }
2792 if (skip) break;
2793 }
2794 return skip;
2795}
2796
2797void SyncValidator::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
2798 VkImageLayout dstImageLayout, uint32_t regionCount,
2799 const VkBufferImageCopy *pRegions) {
locke-lunarg8ec19162020-06-16 18:48:34 -06002800 StateTracker::PreCallRecordCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
locke-lunarga19c71d2020-03-02 18:17:04 -07002801 auto *cb_access_context = GetAccessContext(commandBuffer);
2802 assert(cb_access_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002803 const auto tag = cb_access_context->NextCommandTag(CMD_COPYBUFFERTOIMAGE);
locke-lunarga19c71d2020-03-02 18:17:04 -07002804 auto *context = cb_access_context->GetCurrentAccessContext();
2805 assert(context);
2806
2807 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
John Zulauf16adfc92020-04-08 10:28:33 -06002808 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07002809
2810 for (uint32_t region = 0; region < regionCount; region++) {
2811 const auto &copy_region = pRegions[region];
2812 if (src_buffer) {
John Zulauf355e49b2020-04-24 15:11:15 -06002813 ResourceAccessRange src_range =
2814 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, dst_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06002815 context->UpdateAccessState(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002816 }
2817 if (dst_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002818 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.imageSubresource,
John Zulauf5f13a792020-03-10 07:31:21 -06002819 copy_region.imageOffset, copy_region.imageExtent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002820 }
2821 }
2822}
2823
2824bool SyncValidator::PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
2825 VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount,
2826 const VkBufferImageCopy *pRegions) const {
2827 bool skip = false;
2828 const auto *cb_access_context = GetAccessContext(commandBuffer);
2829 assert(cb_access_context);
2830 if (!cb_access_context) return skip;
2831
2832 const auto *context = cb_access_context->GetCurrentAccessContext();
2833 assert(context);
2834 if (!context) return skip;
2835
2836 const auto *src_image = Get<IMAGE_STATE>(srcImage);
2837 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
2838 const auto dst_mem = (dst_buffer && !dst_buffer->sparse) ? dst_buffer->binding.mem_state->mem : VK_NULL_HANDLE;
2839 for (uint32_t region = 0; region < regionCount; region++) {
2840 const auto &copy_region = pRegions[region];
2841 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002842 auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.imageSubresource,
locke-lunarga19c71d2020-03-02 18:17:04 -07002843 copy_region.imageOffset, copy_region.imageExtent);
2844 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002845 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002846 "vkCmdCopyImageToBuffer: Hazard %s for srcImage %s, region %" PRIu32 ". Prior access %s.",
2847 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
2848 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002849 }
2850 }
2851 if (dst_mem) {
John Zulauf355e49b2020-04-24 15:11:15 -06002852 ResourceAccessRange dst_range =
2853 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, src_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06002854 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range);
locke-lunarga19c71d2020-03-02 18:17:04 -07002855 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002856 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002857 "vkCmdCopyImageToBuffer: Hazard %s for dstBuffer %s, region %" PRIu32 ". Prior access %s.",
2858 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str(), region,
2859 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002860 }
2861 }
2862 if (skip) break;
2863 }
2864 return skip;
2865}
2866
2867void SyncValidator::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2868 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
locke-lunarg8ec19162020-06-16 18:48:34 -06002869 StateTracker::PreCallRecordCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
locke-lunarga19c71d2020-03-02 18:17:04 -07002870 auto *cb_access_context = GetAccessContext(commandBuffer);
2871 assert(cb_access_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002872 const auto tag = cb_access_context->NextCommandTag(CMD_COPYIMAGETOBUFFER);
locke-lunarga19c71d2020-03-02 18:17:04 -07002873 auto *context = cb_access_context->GetCurrentAccessContext();
2874 assert(context);
2875
2876 const auto *src_image = Get<IMAGE_STATE>(srcImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07002877 auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
2878 const auto dst_mem = (dst_buffer && !dst_buffer->sparse) ? dst_buffer->binding.mem_state->mem : VK_NULL_HANDLE;
John Zulauf5f13a792020-03-10 07:31:21 -06002879 const VulkanTypedHandle dst_handle(dst_mem, kVulkanObjectTypeDeviceMemory);
locke-lunarga19c71d2020-03-02 18:17:04 -07002880
2881 for (uint32_t region = 0; region < regionCount; region++) {
2882 const auto &copy_region = pRegions[region];
2883 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002884 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.imageSubresource,
John Zulauf5f13a792020-03-10 07:31:21 -06002885 copy_region.imageOffset, copy_region.imageExtent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002886 }
2887 if (dst_buffer && src_image) {  // src_image is needed below to compute the buffer range size from its format
John Zulauf355e49b2020-04-24 15:11:15 -06002888 ResourceAccessRange dst_range =
2889 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, src_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06002890 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002891 }
2892 }
2893}
2894
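// vkCmdBlitImage: blit offsets may describe a flipped region, so hazards are checked against the axis-aligned
// bounding box of each region (std::min of the two offsets for the origin, abs of their difference for the extent).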
2895bool SyncValidator::PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2896 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2897 const VkImageBlit *pRegions, VkFilter filter) const {
2898 bool skip = false;
2899 const auto *cb_access_context = GetAccessContext(commandBuffer);
2900 assert(cb_access_context);
2901 if (!cb_access_context) return skip;
2902
2903 const auto *context = cb_access_context->GetCurrentAccessContext();
2904 assert(context);
2905 if (!context) return skip;
2906
2907 const auto *src_image = Get<IMAGE_STATE>(srcImage);
2908 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
2909
2910 for (uint32_t region = 0; region < regionCount; region++) {
2911 const auto &blit_region = pRegions[region];
2912 if (src_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06002913 VkOffset3D offset = {std::min(blit_region.srcOffsets[0].x, blit_region.srcOffsets[1].x),
2914 std::min(blit_region.srcOffsets[0].y, blit_region.srcOffsets[1].y),
2915 std::min(blit_region.srcOffsets[0].z, blit_region.srcOffsets[1].z)};
2916 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.srcOffsets[1].x - blit_region.srcOffsets[0].x)),
2917 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].y - blit_region.srcOffsets[0].y)),
2918 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].z - blit_region.srcOffsets[0].z))};
2919 auto hazard =
2920 context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, blit_region.srcSubresource, offset, extent);
locke-lunarga19c71d2020-03-02 18:17:04 -07002921 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002922 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002923 "vkCmdBlitImage: Hazard %s for srcImage %s, region %" PRIu32 ". Prior access %s.",
2924 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
2925 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002926 }
2927 }
2928
2929 if (dst_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06002930 VkOffset3D offset = {std::min(blit_region.dstOffsets[0].x, blit_region.dstOffsets[1].x),
2931 std::min(blit_region.dstOffsets[0].y, blit_region.dstOffsets[1].y),
2932 std::min(blit_region.dstOffsets[0].z, blit_region.dstOffsets[1].z)};
2933 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.dstOffsets[1].x - blit_region.dstOffsets[0].x)),
2934 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].y - blit_region.dstOffsets[0].y)),
2935 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].z - blit_region.dstOffsets[0].z))};
2936 auto hazard =
2937 context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, blit_region.dstSubresource, offset, extent);
locke-lunarga19c71d2020-03-02 18:17:04 -07002938 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002939 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002940 "vkCmdBlitImage: Hazard %s for dstImage %s, region %" PRIu32 ". Prior access %s.",
2941 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
2942 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002943 }
2944 }
2945 if (skip) break;
2946 }
2947
2948 return skip;
2949}
2950
2951void SyncValidator::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2952 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2953 const VkImageBlit *pRegions, VkFilter filter) {
locke-lunarg8ec19162020-06-16 18:48:34 -06002954 StateTracker::PreCallRecordCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount,
2955 pRegions, filter);
locke-lunarga19c71d2020-03-02 18:17:04 -07002956 auto *cb_access_context = GetAccessContext(commandBuffer);
2957 assert(cb_access_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002958 const auto tag = cb_access_context->NextCommandTag(CMD_BLITIMAGE);
locke-lunarga19c71d2020-03-02 18:17:04 -07002959 auto *context = cb_access_context->GetCurrentAccessContext();
2960 assert(context);
2961
2962 auto *src_image = Get<IMAGE_STATE>(srcImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07002963 auto *dst_image = Get<IMAGE_STATE>(dstImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07002964
2965 for (uint32_t region = 0; region < regionCount; region++) {
2966 const auto &blit_region = pRegions[region];
2967 if (src_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06002968 VkOffset3D offset = {std::min(blit_region.srcOffsets[0].x, blit_region.srcOffsets[1].x),
2969 std::min(blit_region.srcOffsets[0].y, blit_region.srcOffsets[1].y),
2970 std::min(blit_region.srcOffsets[0].z, blit_region.srcOffsets[1].z)};
2971 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.srcOffsets[1].x - blit_region.srcOffsets[0].x)),
2972 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].y - blit_region.srcOffsets[0].y)),
2973 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].z - blit_region.srcOffsets[0].z))};
2974 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, blit_region.srcSubresource, offset, extent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002975 }
2976 if (dst_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06002977 VkOffset3D offset = {std::min(blit_region.dstOffsets[0].x, blit_region.dstOffsets[1].x),
2978 std::min(blit_region.dstOffsets[0].y, blit_region.dstOffsets[1].y),
2979 std::min(blit_region.dstOffsets[0].z, blit_region.dstOffsets[1].z)};
2980 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.dstOffsets[1].x - blit_region.dstOffsets[0].x)),
2981 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].y - blit_region.dstOffsets[0].y)),
2982 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].z - blit_region.dstOffsets[0].z))};
2983 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, blit_region.dstSubresource, offset, extent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002984 }
2985 }
2986}
locke-lunarg36ba2592020-04-03 09:42:04 -06002987
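// Validates the indirect parameter buffer as an INDIRECT_COMMAND_READ. When the commands are tightly packed
// (stride == struct_size) a single range of drawCount * struct_size bytes is checked; otherwise each command is
// checked as its own struct_size-byte range at offset + i * stride (e.g. with sizeof(VkDrawIndirectCommand) == 16
// and stride == 32, the checked ranges are [offset, offset + 16), [offset + 32, offset + 48), ...).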
locke-lunarg61870c22020-06-09 14:51:50 -06002988bool SyncValidator::ValidateIndirectBuffer(const AccessContext &context, VkCommandBuffer commandBuffer,
2989 const VkDeviceSize struct_size, const VkBuffer buffer, const VkDeviceSize offset,
2990 const uint32_t drawCount, const uint32_t stride, const char *function) const {
locke-lunargff255f92020-05-13 18:53:52 -06002991 bool skip = false;
2992 if (drawCount == 0) return skip;
2993
2994 const auto *buf_state = Get<BUFFER_STATE>(buffer);
2995 VkDeviceSize size = struct_size;
2996 if (drawCount == 1 || stride == size) {
2997 if (drawCount > 1) size *= drawCount;
2998 ResourceAccessRange range = MakeRange(offset, size);
2999 auto hazard = context.DetectHazard(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range);
3000 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003001 skip |= LogError(buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
3002 "%s: Hazard %s for indirect %s in %s. Prior access %s.", function, string_SyncHazard(hazard.hazard),
3003 report_data->FormatHandle(buffer).c_str(), report_data->FormatHandle(commandBuffer).c_str(),
3004 string_UsageTag(hazard.tag).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06003005 }
3006 } else {
3007 for (uint32_t i = 0; i < drawCount; ++i) {
3008 ResourceAccessRange range = MakeRange(offset + i * stride, size);
3009 auto hazard = context.DetectHazard(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range);
3010 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003011 skip |= LogError(buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
3012 "%s: Hazard %s for indirect %s in %s. Prior access %s.", function,
3013 string_SyncHazard(hazard.hazard), report_data->FormatHandle(buffer).c_str(),
3014 report_data->FormatHandle(commandBuffer).c_str(), string_UsageTag(hazard.tag).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06003015 break;
3016 }
3017 }
3018 }
3019 return skip;
3020}
3021
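// Records the corresponding INDIRECT_COMMAND_READ accesses for the indirect parameter buffer, mirroring the range
// logic used in ValidateIndirectBuffer above.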
locke-lunarg61870c22020-06-09 14:51:50 -06003022void SyncValidator::RecordIndirectBuffer(AccessContext &context, const ResourceUsageTag &tag, const VkDeviceSize struct_size,
3023 const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount,
3024 uint32_t stride) {
locke-lunargff255f92020-05-13 18:53:52 -06003025 const auto *buf_state = Get<BUFFER_STATE>(buffer);
3026 VkDeviceSize size = struct_size;
3027 if (drawCount == 1 || stride == size) {
3028 if (drawCount > 1) size *= drawCount;
3029 ResourceAccessRange range = MakeRange(offset, size);
3030 context.UpdateAccessState(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range, tag);
3031 } else {
3032 for (uint32_t i = 0; i < drawCount; ++i) {
3033 ResourceAccessRange range = MakeRange(offset + i * stride, size);
3034 context.UpdateAccessState(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range, tag);
3035 }
3036 }
3037}
3038
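// Validates the draw-count parameter of the *Count draw variants: a single 4-byte (uint32_t) INDIRECT_COMMAND_READ
// at countBufferOffset.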
locke-lunarg61870c22020-06-09 14:51:50 -06003039bool SyncValidator::ValidateCountBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, VkBuffer buffer,
3040 VkDeviceSize offset, const char *function) const {
locke-lunargff255f92020-05-13 18:53:52 -06003041 bool skip = false;
3042
3043 const auto *count_buf_state = Get<BUFFER_STATE>(buffer);
3044 ResourceAccessRange range = MakeRange(offset, 4);
3045 auto hazard = context.DetectHazard(*count_buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range);
3046 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003047 skip |= LogError(count_buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
3048 "%s: Hazard %s for countBuffer %s in %s. Prior access %s.", function, string_SyncHazard(hazard.hazard),
3049 report_data->FormatHandle(buffer).c_str(), report_data->FormatHandle(commandBuffer).c_str(),
3050 string_UsageTag(hazard.tag).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06003051 }
3052 return skip;
3053}
3054
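// Records the matching 4-byte INDIRECT_COMMAND_READ for the count buffer.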
locke-lunarg61870c22020-06-09 14:51:50 -06003055void SyncValidator::RecordCountBuffer(AccessContext &context, const ResourceUsageTag &tag, VkBuffer buffer, VkDeviceSize offset) {
locke-lunargff255f92020-05-13 18:53:52 -06003056 const auto *count_buf_state = Get<BUFFER_STATE>(buffer);
3057 ResourceAccessRange range = MakeRange(offset, 4);
3058 context.UpdateAccessState(*count_buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range, tag);
3059}
3060
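// vkCmdDispatch touches memory only through bound descriptors, so validation is limited to the compute bind point's
// descriptor sets.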
locke-lunarg36ba2592020-04-03 09:42:04 -06003061bool SyncValidator::PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const {
locke-lunargff255f92020-05-13 18:53:52 -06003062 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003063 const auto *cb_access_context = GetAccessContext(commandBuffer);
3064 assert(cb_access_context);
3065 if (!cb_access_context) return skip;
3066
locke-lunarg61870c22020-06-09 14:51:50 -06003067 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch");
locke-lunargff255f92020-05-13 18:53:52 -06003068 return skip;
locke-lunarg36ba2592020-04-03 09:42:04 -06003069}
3070
3071void SyncValidator::PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003072 StateTracker::PreCallRecordCmdDispatch(commandBuffer, x, y, z);
locke-lunargff255f92020-05-13 18:53:52 -06003073 auto *cb_access_context = GetAccessContext(commandBuffer);
3074 assert(cb_access_context);
3075 const auto tag = cb_access_context->NextCommandTag(CMD_DISPATCH);
locke-lunargff255f92020-05-13 18:53:52 -06003076
locke-lunarg61870c22020-06-09 14:51:50 -06003077 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, tag);
locke-lunarg36ba2592020-04-03 09:42:04 -06003078}
locke-lunarge1a67022020-04-29 00:15:36 -06003079
3080bool SyncValidator::PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const {
locke-lunargff255f92020-05-13 18:53:52 -06003081 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003082 const auto *cb_access_context = GetAccessContext(commandBuffer);
3083 assert(cb_access_context);
3084 if (!cb_access_context) return skip;
3085
3086 const auto *context = cb_access_context->GetCurrentAccessContext();
3087 assert(context);
3088 if (!context) return skip;
3089
locke-lunarg61870c22020-06-09 14:51:50 -06003090 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect");
3091 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDispatchIndirectCommand), buffer, offset, 1,
3092 sizeof(VkDispatchIndirectCommand), "vkCmdDispatchIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003093 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003094}
3095
3096void SyncValidator::PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003097 StateTracker::PreCallRecordCmdDispatchIndirect(commandBuffer, buffer, offset);
locke-lunargff255f92020-05-13 18:53:52 -06003098 auto *cb_access_context = GetAccessContext(commandBuffer);
3099 assert(cb_access_context);
3100 const auto tag = cb_access_context->NextCommandTag(CMD_DISPATCHINDIRECT);
3101 auto *context = cb_access_context->GetCurrentAccessContext();
3102 assert(context);
3103
locke-lunarg61870c22020-06-09 14:51:50 -06003104 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, tag);
3105 RecordIndirectBuffer(*context, tag, sizeof(VkDispatchIndirectCommand), buffer, offset, 1, sizeof(VkDispatchIndirectCommand));
locke-lunarge1a67022020-04-29 00:15:36 -06003106}
3107
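// vkCmdDraw: checks descriptor accesses for the graphics bind point, the bound vertex buffers for the requested
// vertex range, and the current subpass attachments.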
3108bool SyncValidator::PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
3109 uint32_t firstVertex, uint32_t firstInstance) const {
locke-lunarga4d39ea2020-05-22 14:17:29 -06003110 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003111 const auto *cb_access_context = GetAccessContext(commandBuffer);
3112 assert(cb_access_context);
3113 if (!cb_access_context) return skip;
3114
locke-lunarg61870c22020-06-09 14:51:50 -06003115 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw");
3116 skip |= cb_access_context->ValidateDrawVertex(vertexCount, firstVertex, "vkCmdDraw");
3117 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDraw");
locke-lunarga4d39ea2020-05-22 14:17:29 -06003118 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003119}
3120
3121void SyncValidator::PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
3122 uint32_t firstVertex, uint32_t firstInstance) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003123 StateTracker::PreCallRecordCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
locke-lunargff255f92020-05-13 18:53:52 -06003124 auto *cb_access_context = GetAccessContext(commandBuffer);
3125 assert(cb_access_context);
3126 const auto tag = cb_access_context->NextCommandTag(CMD_DRAW);
locke-lunargff255f92020-05-13 18:53:52 -06003127
locke-lunarg61870c22020-06-09 14:51:50 -06003128 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3129 cb_access_context->RecordDrawVertex(vertexCount, firstVertex, tag);
3130 cb_access_context->RecordDrawSubpassAttachment(tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003131}
3132
3133bool SyncValidator::PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
3134 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const {
locke-lunarga4d39ea2020-05-22 14:17:29 -06003135 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003136 const auto *cb_access_context = GetAccessContext(commandBuffer);
3137 assert(cb_access_context);
3138 if (!cb_access_context) return skip;
3139
locke-lunarg61870c22020-06-09 14:51:50 -06003140 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed");
3141 skip |= cb_access_context->ValidateDrawVertexIndex(indexCount, firstIndex, "vkCmdDrawIndexed");
3142 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDrawIndexed");
locke-lunarga4d39ea2020-05-22 14:17:29 -06003143 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003144}
3145
3146void SyncValidator::PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
3147 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003148 StateTracker::PreCallRecordCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
locke-lunargff255f92020-05-13 18:53:52 -06003149 auto *cb_access_context = GetAccessContext(commandBuffer);
3150 assert(cb_access_context);
3151 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDEXED);
locke-lunargff255f92020-05-13 18:53:52 -06003152
locke-lunarg61870c22020-06-09 14:51:50 -06003153 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3154 cb_access_context->RecordDrawVertexIndex(indexCount, firstIndex, tag);
3155 cb_access_context->RecordDrawSubpassAttachment(tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003156}
3157
3158bool SyncValidator::PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3159 uint32_t drawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003160 bool skip = false;
3161 if (drawCount == 0) return skip;
3162
locke-lunargff255f92020-05-13 18:53:52 -06003163 const auto *cb_access_context = GetAccessContext(commandBuffer);
3164 assert(cb_access_context);
3165 if (!cb_access_context) return skip;
3166
3167 const auto *context = cb_access_context->GetCurrentAccessContext();
3168 assert(context);
3169 if (!context) return skip;
3170
locke-lunarg61870c22020-06-09 14:51:50 -06003171 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect");
3172 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDrawIndirect");
3173 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndirectCommand), buffer, offset, drawCount, stride,
3174 "vkCmdDrawIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003175
3176 // TODO: For now, we validate the whole vertex buffer. This may cause false positives.
3177 // The VkDrawIndirectCommand buffer contents can still be modified until queue submission.
3178 // We will validate the vertex buffer at queue submission time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003179 skip |= cb_access_context->ValidateDrawVertex(UINT32_MAX, 0, "vkCmdDrawIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003180 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003181}
3182
3183void SyncValidator::PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3184 uint32_t drawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003185 StateTracker::PreCallRecordCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003186 if (drawCount == 0) return;
locke-lunargff255f92020-05-13 18:53:52 -06003187 auto *cb_access_context = GetAccessContext(commandBuffer);
3188 assert(cb_access_context);
3189 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDIRECT);
3190 auto *context = cb_access_context->GetCurrentAccessContext();
3191 assert(context);
3192
locke-lunarg61870c22020-06-09 14:51:50 -06003193 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3194 cb_access_context->RecordDrawSubpassAttachment(tag);
3195 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndirectCommand), buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003196
3197 // TODO: For now, we record the whole vertex buffer. This may cause false positives.
3198 // The VkDrawIndirectCommand buffer contents can still be modified until queue submission.
3199 // We will record the vertex buffer at queue submission time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003200 cb_access_context->RecordDrawVertex(UINT32_MAX, 0, tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003201}
3202
3203bool SyncValidator::PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3204 uint32_t drawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003205 bool skip = false;
3206 if (drawCount == 0) return skip;
locke-lunargff255f92020-05-13 18:53:52 -06003207 const auto *cb_access_context = GetAccessContext(commandBuffer);
3208 assert(cb_access_context);
3209 if (!cb_access_context) return skip;
3210
3211 const auto *context = cb_access_context->GetCurrentAccessContext();
3212 assert(context);
3213 if (!context) return skip;
3214
locke-lunarg61870c22020-06-09 14:51:50 -06003215 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect");
3216 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDrawIndexedIndirect");
3217 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, drawCount, stride,
3218 "vkCmdDrawIndexedIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003219
3220 // TODO: For now, we validate the whole index and vertex buffer. This may cause false positives.
3221 // The VkDrawIndexedIndirectCommand buffer contents can still be modified until queue submission.
3222 // We will validate the index and vertex buffer at queue submission time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003223 skip |= cb_access_context->ValidateDrawVertexIndex(UINT32_MAX, 0, "vkCmdDrawIndexedIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003224 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003225}
3226
3227void SyncValidator::PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3228 uint32_t drawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003229 StateTracker::PreCallRecordCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003230 auto *cb_access_context = GetAccessContext(commandBuffer);
3231 assert(cb_access_context);
3232 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDEXEDINDIRECT);
3233 auto *context = cb_access_context->GetCurrentAccessContext();
3234 assert(context);
3235
locke-lunarg61870c22020-06-09 14:51:50 -06003236 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3237 cb_access_context->RecordDrawSubpassAttachment(tag);
3238 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003239
3240 // TODO: For now, we record the whole index and vertex buffer. This may cause false positives.
3241 // The VkDrawIndexedIndirectCommand buffer contents can still be modified until queue submission.
3242 // We will record the index and vertex buffer at queue submission time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003243 cb_access_context->RecordDrawVertexIndex(UINT32_MAX, 0, tag);
locke-lunargff255f92020-05-13 18:53:52 -06003244}
3245
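// Common validation for vkCmdDrawIndirectCount and its KHR/AMD aliases: descriptor and attachment accesses, up to
// maxDrawCount indirect command reads, and the 4-byte count buffer read. The actual draw count is not known until
// submit time, so vertex buffers are validated conservatively over their whole range.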
3246bool SyncValidator::ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3247 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3248 uint32_t stride, const char *function) const {
3249 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003250 const auto *cb_access_context = GetAccessContext(commandBuffer);
3251 assert(cb_access_context);
3252 if (!cb_access_context) return skip;
3253
3254 const auto *context = cb_access_context->GetCurrentAccessContext();
3255 assert(context);
3256 if (!context) return skip;
3257
locke-lunarg61870c22020-06-09 14:51:50 -06003258 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, function);
3259 skip |= cb_access_context->ValidateDrawSubpassAttachment(function);
3260 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndirectCommand), buffer, offset, maxDrawCount, stride,
3261 function);
3262 skip |= ValidateCountBuffer(*context, commandBuffer, countBuffer, countBufferOffset, function);
locke-lunargff255f92020-05-13 18:53:52 -06003263
3264 // TODO: For now, we validate the whole vertex buffer. This may cause false positives.
3265 // The VkDrawIndirectCommand buffer contents can still be modified until queue submission.
3266 // We will validate the vertex buffer at queue submission time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003267 skip |= cb_access_context->ValidateDrawVertex(UINT32_MAX, 0, function);
locke-lunargff255f92020-05-13 18:53:52 -06003268 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003269}
3270
3271bool SyncValidator::PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3272 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3273 uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003274 return ValidateCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3275 "vkCmdDrawIndirectCount");
locke-lunarge1a67022020-04-29 00:15:36 -06003276}
3277
3278void SyncValidator::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3279 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3280 uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003281 StateTracker::PreCallRecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount,
3282 stride);
locke-lunargff255f92020-05-13 18:53:52 -06003283 auto *cb_access_context = GetAccessContext(commandBuffer);
3284 assert(cb_access_context);
3285 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDIRECTCOUNT);
3286 auto *context = cb_access_context->GetCurrentAccessContext();
3287 assert(context);
3288
locke-lunarg61870c22020-06-09 14:51:50 -06003289 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3290 cb_access_context->RecordDrawSubpassAttachment(tag);
3291 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndirectCommand), buffer, offset, 1, stride);
3292 RecordCountBuffer(*context, tag, countBuffer, countBufferOffset);
locke-lunargff255f92020-05-13 18:53:52 -06003293
3294 // TODO: For now, we record the whole vertex buffer. This may cause false positives.
3295 // The VkDrawIndirectCommand buffer contents can still be modified until queue submission.
3296 // We will record the vertex buffer at queue submission time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003297 cb_access_context->RecordDrawVertex(UINT32_MAX, 0, tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003298}
3299
3300bool SyncValidator::PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3301 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3302 uint32_t maxDrawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003303 return ValidateCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3304 "vkCmdDrawIndirectCountKHR");
locke-lunarge1a67022020-04-29 00:15:36 -06003305}
3306
3307void SyncValidator::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3308 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3309 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003310 StateTracker::PreCallRecordCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount,
3311 stride);
locke-lunargff255f92020-05-13 18:53:52 -06003312 PreCallRecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
locke-lunarge1a67022020-04-29 00:15:36 -06003313}
3314
3315bool SyncValidator::PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3316 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3317 uint32_t maxDrawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003318 return ValidateCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3319 "vkCmdDrawIndirectCountAMD");
locke-lunarge1a67022020-04-29 00:15:36 -06003320}
3321
3322void SyncValidator::PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3323 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3324 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003325 StateTracker::PreCallRecordCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount,
3326 stride);
locke-lunargff255f92020-05-13 18:53:52 -06003327 PreCallRecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
3328}
3329
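// Indexed counterpart of ValidateCmdDrawIndirectCount, shared by vkCmdDrawIndexedIndirectCount and its KHR/AMD
// aliases; index and vertex buffers are validated conservatively for the same reason.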
3330bool SyncValidator::ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3331 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3332 uint32_t stride, const char *function) const {
3333 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003334 const auto *cb_access_context = GetAccessContext(commandBuffer);
3335 assert(cb_access_context);
3336 if (!cb_access_context) return skip;
3337
3338 const auto *context = cb_access_context->GetCurrentAccessContext();
3339 assert(context);
3340 if (!context) return skip;
3341
locke-lunarg61870c22020-06-09 14:51:50 -06003342 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, function);
3343 skip |= cb_access_context->ValidateDrawSubpassAttachment(function);
3344 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, maxDrawCount,
3345 stride, function);
3346 skip |= ValidateCountBuffer(*context, commandBuffer, countBuffer, countBufferOffset, function);
locke-lunargff255f92020-05-13 18:53:52 -06003347
3348 // TODO: For now, we validate the whole index and vertex buffer. This may cause false positives.
3349 // The VkDrawIndexedIndirectCommand buffer contents can still be modified until queue submission.
3350 // We will validate the index and vertex buffer at queue submission time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003351 skip |= cb_access_context->ValidateDrawVertexIndex(UINT32_MAX, 0, function);
locke-lunargff255f92020-05-13 18:53:52 -06003352 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003353}
3354
3355bool SyncValidator::PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3356 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3357 uint32_t maxDrawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003358 return ValidateCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3359 "vkCmdDrawIndexedIndirectCount");
locke-lunarge1a67022020-04-29 00:15:36 -06003360}
3361
3362void SyncValidator::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3363 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3364 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003365 StateTracker::PreCallRecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
3366 maxDrawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003367 auto *cb_access_context = GetAccessContext(commandBuffer);
3368 assert(cb_access_context);
3369 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDEXEDINDIRECTCOUNT);
3370 auto *context = cb_access_context->GetCurrentAccessContext();
3371 assert(context);
3372
locke-lunarg61870c22020-06-09 14:51:50 -06003373 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3374 cb_access_context->RecordDrawSubpassAttachment(tag);
3375 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, 1, stride);
3376 RecordCountBuffer(*context, tag, countBuffer, countBufferOffset);
locke-lunargff255f92020-05-13 18:53:52 -06003377
3378 // TODO: For now, we record the whole index and vertex buffer. This may cause false positives.
3379 // The VkDrawIndexedIndirectCommand buffer contents can still be modified until queue submission.
locke-lunarg61870c22020-06-09 14:51:50 -06003380 // We will record the index and vertex buffer at queue submission time in the future.
3381 cb_access_context->RecordDrawVertexIndex(UINT32_MAX, 0, tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003382}
3383
3384bool SyncValidator::PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
3385 VkDeviceSize offset, VkBuffer countBuffer,
3386 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3387 uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003388 return ValidateCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3389 "vkCmdDrawIndexedIndirectCountKHR");
locke-lunarge1a67022020-04-29 00:15:36 -06003390}
3391
3392void SyncValidator::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3393 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3394 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003395 StateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
3396 maxDrawCount, stride);
locke-lunarge1a67022020-04-29 00:15:36 -06003397 PreCallRecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
3398}
3399
3400bool SyncValidator::PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer,
3401 VkDeviceSize offset, VkBuffer countBuffer,
3402 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3403 uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003404 return ValidateCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3405 "vkCmdDrawIndexedIndirectCountAMD");
locke-lunarge1a67022020-04-29 00:15:36 -06003406}
3407
3408void SyncValidator::PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3409 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3410 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003411 StateTracker::PreCallRecordCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
3412 maxDrawCount, stride);
locke-lunarge1a67022020-04-29 00:15:36 -06003413 PreCallRecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
3414}
3415
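// vkCmdClearColorImage: each requested subresource range is treated as a TRANSFER_WRITE covering the full image
// extent.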
3416bool SyncValidator::PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
3417 const VkClearColorValue *pColor, uint32_t rangeCount,
3418 const VkImageSubresourceRange *pRanges) const {
3419 bool skip = false;
3420 const auto *cb_access_context = GetAccessContext(commandBuffer);
3421 assert(cb_access_context);
3422 if (!cb_access_context) return skip;
3423
3424 const auto *context = cb_access_context->GetCurrentAccessContext();
3425 assert(context);
3426 if (!context) return skip;
3427
3428 const auto *image_state = Get<IMAGE_STATE>(image);
3429
3430 for (uint32_t index = 0; index < rangeCount; index++) {
3431 const auto &range = pRanges[index];
3432 if (image_state) {
3433 auto hazard =
3434 context->DetectHazard(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent);
3435 if (hazard.hazard) {
3436 skip |= LogError(image, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06003437 "vkCmdClearColorImage: Hazard %s for %s, range index %" PRIu32 ". Prior access %s.",
3438 string_SyncHazard(hazard.hazard), report_data->FormatHandle(image).c_str(), index,
3439 string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003440 }
3441 }
3442 }
3443 return skip;
3444}
3445
3446void SyncValidator::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
3447 const VkClearColorValue *pColor, uint32_t rangeCount,
3448 const VkImageSubresourceRange *pRanges) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003449 StateTracker::PreCallRecordCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
locke-lunarge1a67022020-04-29 00:15:36 -06003450 auto *cb_access_context = GetAccessContext(commandBuffer);
3451 assert(cb_access_context);
3452 const auto tag = cb_access_context->NextCommandTag(CMD_CLEARCOLORIMAGE);
3453 auto *context = cb_access_context->GetCurrentAccessContext();
3454 assert(context);
3455
3456 const auto *image_state = Get<IMAGE_STATE>(image);
3457
3458 for (uint32_t index = 0; index < rangeCount; index++) {
3459 const auto &range = pRanges[index];
3460 if (image_state) {
3461 context->UpdateAccessState(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent,
3462 tag);
3463 }
3464 }
3465}
3466
3467bool SyncValidator::PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
3468 VkImageLayout imageLayout,
3469 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
3470 const VkImageSubresourceRange *pRanges) const {
3471 bool skip = false;
3472 const auto *cb_access_context = GetAccessContext(commandBuffer);
3473 assert(cb_access_context);
3474 if (!cb_access_context) return skip;
3475
3476 const auto *context = cb_access_context->GetCurrentAccessContext();
3477 assert(context);
3478 if (!context) return skip;
3479
3480 const auto *image_state = Get<IMAGE_STATE>(image);
3481
3482 for (uint32_t index = 0; index < rangeCount; index++) {
3483 const auto &range = pRanges[index];
3484 if (image_state) {
3485 auto hazard =
3486 context->DetectHazard(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent);
3487 if (hazard.hazard) {
3488 skip |= LogError(image, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06003489 "vkCmdClearDepthStencilImage: Hazard %s for %s, range index %" PRIu32 ". Prior access %s.",
3490 string_SyncHazard(hazard.hazard), report_data->FormatHandle(image).c_str(), index,
3491 string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003492 }
3493 }
3494 }
3495 return skip;
3496}
3497
3498void SyncValidator::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
3499 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
3500 const VkImageSubresourceRange *pRanges) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003501 StateTracker::PreCallRecordCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
locke-lunarge1a67022020-04-29 00:15:36 -06003502 auto *cb_access_context = GetAccessContext(commandBuffer);
3503 assert(cb_access_context);
3504 const auto tag = cb_access_context->NextCommandTag(CMD_CLEARDEPTHSTENCILIMAGE);
3505 auto *context = cb_access_context->GetCurrentAccessContext();
3506 assert(context);
3507
3508 const auto *image_state = Get<IMAGE_STATE>(image);
3509
3510 for (uint32_t index = 0; index < rangeCount; index++) {
3511 const auto &range = pRanges[index];
3512 if (image_state) {
3513 context->UpdateAccessState(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent,
3514 tag);
3515 }
3516 }
3517}
3518
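// vkCmdCopyQueryPoolResults: the destination is treated as a TRANSFER_WRITE of stride * queryCount bytes starting
// at dstOffset; accesses to the query pool itself are not yet tracked (see the TODO below).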
3519bool SyncValidator::PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3520 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3521 VkDeviceSize dstOffset, VkDeviceSize stride,
3522 VkQueryResultFlags flags) const {
3523 bool skip = false;
3524 const auto *cb_access_context = GetAccessContext(commandBuffer);
3525 assert(cb_access_context);
3526 if (!cb_access_context) return skip;
3527
3528 const auto *context = cb_access_context->GetCurrentAccessContext();
3529 assert(context);
3530 if (!context) return skip;
3531
3532 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3533
3534 if (dst_buffer) {
locke-lunargff255f92020-05-13 18:53:52 -06003535 ResourceAccessRange range = MakeRange(dstOffset, stride * queryCount);
locke-lunarge1a67022020-04-29 00:15:36 -06003536 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
3537 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003538 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
3539 "vkCmdCopyQueryPoolResults: Hazard %s for dstBuffer %s. Prior access %s.",
3540 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str(),
3541 string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003542 }
3543 }
locke-lunargff255f92020-05-13 18:53:52 -06003544
3545 // TODO: Track VkQueryPool
locke-lunarge1a67022020-04-29 00:15:36 -06003546 return skip;
3547}
3548
3549void SyncValidator::PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
3550 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
3551 VkDeviceSize stride, VkQueryResultFlags flags) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003552 StateTracker::PreCallRecordCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset,
3553 stride, flags);
locke-lunarge1a67022020-04-29 00:15:36 -06003554 auto *cb_access_context = GetAccessContext(commandBuffer);
3555 assert(cb_access_context);
locke-lunargff255f92020-05-13 18:53:52 -06003556 const auto tag = cb_access_context->NextCommandTag(CMD_COPYQUERYPOOLRESULTS);
locke-lunarge1a67022020-04-29 00:15:36 -06003557 auto *context = cb_access_context->GetCurrentAccessContext();
3558 assert(context);
3559
3560 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3561
3562 if (dst_buffer) {
locke-lunargff255f92020-05-13 18:53:52 -06003563 ResourceAccessRange range = MakeRange(dstOffset, stride * queryCount);
locke-lunarge1a67022020-04-29 00:15:36 -06003564 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
3565 }
locke-lunargff255f92020-05-13 18:53:52 -06003566
3567 // TODO: Track VkQueryPool
locke-lunarge1a67022020-04-29 00:15:36 -06003568}
3569
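// vkCmdFillBuffer: the fill is treated as a TRANSFER_WRITE of [dstOffset, dstOffset + size); note that size is
// passed to MakeRange unmodified (VK_WHOLE_SIZE is not special-cased here).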
3570bool SyncValidator::PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
3571 VkDeviceSize size, uint32_t data) const {
3572 bool skip = false;
3573 const auto *cb_access_context = GetAccessContext(commandBuffer);
3574 assert(cb_access_context);
3575 if (!cb_access_context) return skip;
3576
3577 const auto *context = cb_access_context->GetCurrentAccessContext();
3578 assert(context);
3579 if (!context) return skip;
3580
3581 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3582
3583 if (dst_buffer) {
3584 ResourceAccessRange range = MakeRange(dstOffset, size);
3585 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
3586 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003587 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
3588 "vkCmdFillBuffer: Hazard %s for dstBuffer %s. Prior access %s.", string_SyncHazard(hazard.hazard),
3589 report_data->FormatHandle(dstBuffer).c_str(), string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003590 }
3591 }
3592 return skip;
3593}
3594
3595void SyncValidator::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
3596 VkDeviceSize size, uint32_t data) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003597 StateTracker::PreCallRecordCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
locke-lunarge1a67022020-04-29 00:15:36 -06003598 auto *cb_access_context = GetAccessContext(commandBuffer);
3599 assert(cb_access_context);
3600 const auto tag = cb_access_context->NextCommandTag(CMD_FILLBUFFER);
3601 auto *context = cb_access_context->GetCurrentAccessContext();
3602 assert(context);
3603
3604 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3605
3606 if (dst_buffer) {
3607 ResourceAccessRange range = MakeRange(dstOffset, size);
3608 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
3609 }
3610}
3611
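// vkCmdResolveImage: each region is a TRANSFER_READ of the source subresource region and a TRANSFER_WRITE of the
// destination subresource region, using the offsets and extent supplied in the VkImageResolve.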
3612bool SyncValidator::PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3613 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
3614 const VkImageResolve *pRegions) const {
3615 bool skip = false;
3616 const auto *cb_access_context = GetAccessContext(commandBuffer);
3617 assert(cb_access_context);
3618 if (!cb_access_context) return skip;
3619
3620 const auto *context = cb_access_context->GetCurrentAccessContext();
3621 assert(context);
3622 if (!context) return skip;
3623
3624 const auto *src_image = Get<IMAGE_STATE>(srcImage);
3625 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
3626
3627 for (uint32_t region = 0; region < regionCount; region++) {
3628 const auto &resolve_region = pRegions[region];
3629 if (src_image) {
3630 auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, resolve_region.srcSubresource,
3631 resolve_region.srcOffset, resolve_region.extent);
3632 if (hazard.hazard) {
3633 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06003634 "vkCmdResolveImage: Hazard %s for srcImage %s, region %" PRIu32 ". Prior access %s.",
3635 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
3636 string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003637 }
3638 }
3639
3640 if (dst_image) {
3641 auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, resolve_region.dstSubresource,
3642 resolve_region.dstOffset, resolve_region.extent);
3643 if (hazard.hazard) {
3644 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06003645 "vkCmdResolveImage: Hazard %s for dstImage %s, region %" PRIu32 ". Prior access %s.",
3646 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
3647 string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003648 }
3649 }
3650 if (skip) break;
3651 }
3652
3653 return skip;
3654}
3655
3656void SyncValidator::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3657 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
3658 const VkImageResolve *pRegions) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003659 StateTracker::PreCallRecordCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount,
3660 pRegions);
locke-lunarge1a67022020-04-29 00:15:36 -06003661 auto *cb_access_context = GetAccessContext(commandBuffer);
3662 assert(cb_access_context);
3663 const auto tag = cb_access_context->NextCommandTag(CMD_RESOLVEIMAGE);
3664 auto *context = cb_access_context->GetCurrentAccessContext();
3665 assert(context);
3666
3667 auto *src_image = Get<IMAGE_STATE>(srcImage);
3668 auto *dst_image = Get<IMAGE_STATE>(dstImage);
3669
3670 for (uint32_t region = 0; region < regionCount; region++) {
3671 const auto &resolve_region = pRegions[region];
3672 if (src_image) {
3673 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, resolve_region.srcSubresource,
3674 resolve_region.srcOffset, resolve_region.extent, tag);
3675 }
3676 if (dst_image) {
3677 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, resolve_region.dstSubresource,
3678 resolve_region.dstOffset, resolve_region.extent, tag);
3679 }
3680 }
3681}
3682
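// vkCmdUpdateBuffer: the inline update is treated as a TRANSFER_WRITE of dataSize bytes at dstOffset.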
3683bool SyncValidator::PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
3684 VkDeviceSize dataSize, const void *pData) const {
3685 bool skip = false;
3686 const auto *cb_access_context = GetAccessContext(commandBuffer);
3687 assert(cb_access_context);
3688 if (!cb_access_context) return skip;
3689
3690 const auto *context = cb_access_context->GetCurrentAccessContext();
3691 assert(context);
3692 if (!context) return skip;
3693
3694 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3695
3696 if (dst_buffer) {
3697 ResourceAccessRange range = MakeRange(dstOffset, dataSize);
3698 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
3699 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003700 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
3701 "vkCmdUpdateBuffer: Hazard %s for dstBuffer %s. Prior access %s.", string_SyncHazard(hazard.hazard),
3702 report_data->FormatHandle(dstBuffer).c_str(), string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003703 }
3704 }
3705 return skip;
3706}
3707
3708void SyncValidator::PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
3709 VkDeviceSize dataSize, const void *pData) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003710 StateTracker::PreCallRecordCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
locke-lunarge1a67022020-04-29 00:15:36 -06003711 auto *cb_access_context = GetAccessContext(commandBuffer);
3712 assert(cb_access_context);
3713 const auto tag = cb_access_context->NextCommandTag(CMD_UPDATEBUFFER);
3714 auto *context = cb_access_context->GetCurrentAccessContext();
3715 assert(context);
3716
3717 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3718
3719 if (dst_buffer) {
3720 ResourceAccessRange range = MakeRange(dstOffset, dataSize);
3721 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
3722 }
3723}
locke-lunargff255f92020-05-13 18:53:52 -06003724
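// vkCmdWriteBufferMarkerAMD: the marker write is treated as a 4-byte TRANSFER_WRITE at dstOffset, regardless of the
// pipelineStage argument.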
3725bool SyncValidator::PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3726 VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const {
3727 bool skip = false;
3728 const auto *cb_access_context = GetAccessContext(commandBuffer);
3729 assert(cb_access_context);
3730 if (!cb_access_context) return skip;
3731
3732 const auto *context = cb_access_context->GetCurrentAccessContext();
3733 assert(context);
3734 if (!context) return skip;
3735
3736 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3737
3738 if (dst_buffer) {
3739 ResourceAccessRange range = MakeRange(dstOffset, 4);
3740 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
3741 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003742 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
3743 "vkCmdWriteBufferMarkerAMD: Hazard %s for dstBuffer %s. Prior access %s.",
3744 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str(),
3745 string_UsageTag(hazard.tag).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06003746 }
3747 }
3748 return skip;
3749}
3750
3751void SyncValidator::PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3752 VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003753 StateTracker::PreCallRecordCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
locke-lunargff255f92020-05-13 18:53:52 -06003754 auto *cb_access_context = GetAccessContext(commandBuffer);
3755 assert(cb_access_context);
3756 const auto tag = cb_access_context->NextCommandTag(CMD_WRITEBUFFERMARKERAMD);
3757 auto *context = cb_access_context->GetCurrentAccessContext();
3758 assert(context);
3759
3760 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3761
3762 if (dst_buffer) {
3763 ResourceAccessRange range = MakeRange(dstOffset, 4);
3764 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
3765 }
3766}