/* Copyright (c) 2019-2020 The Khronos Group Inc.
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 */

#include <limits>
#include <vector>
#include <memory>
#include <bitset>
#include "synchronization_validation.h"

static const char *string_SyncHazardVUID(SyncHazard hazard) {
    switch (hazard) {
        case SyncHazard::NONE:
            return "SYNC-HAZARD-NONE";
            break;
        case SyncHazard::READ_AFTER_WRITE:
            return "SYNC-HAZARD-READ_AFTER_WRITE";
            break;
        case SyncHazard::WRITE_AFTER_READ:
            return "SYNC-HAZARD-WRITE_AFTER_READ";
            break;
        case SyncHazard::WRITE_AFTER_WRITE:
            return "SYNC-HAZARD-WRITE_AFTER_WRITE";
            break;
        case SyncHazard::READ_RACING_WRITE:
            return "SYNC-HAZARD-READ-RACING-WRITE";
            break;
        case SyncHazard::WRITE_RACING_WRITE:
            return "SYNC-HAZARD-WRITE-RACING-WRITE";
            break;
        case SyncHazard::WRITE_RACING_READ:
            return "SYNC-HAZARD-WRITE-RACING-READ";
            break;
        default:
            assert(0);
    }
    return "SYNC-HAZARD-INVALID";
}

static const char *string_SyncHazard(SyncHazard hazard) {
    switch (hazard) {
        case SyncHazard::NONE:
            return "NONE";
            break;
        case SyncHazard::READ_AFTER_WRITE:
            return "READ_AFTER_WRITE";
            break;
        case SyncHazard::WRITE_AFTER_READ:
            return "WRITE_AFTER_READ";
            break;
        case SyncHazard::WRITE_AFTER_WRITE:
            return "WRITE_AFTER_WRITE";
            break;
        case SyncHazard::READ_RACING_WRITE:
            return "READ_RACING_WRITE";
            break;
        case SyncHazard::WRITE_RACING_WRITE:
            return "WRITE_RACING_WRITE";
            break;
        case SyncHazard::WRITE_RACING_READ:
            return "WRITE_RACING_READ";
            break;
        default:
            assert(0);
    }
    return "INVALID HAZARD";
}

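// Formats a usage tag for error messages. Note (based on the packing used below): the low 32 bits of tag.index are
// reported as the command sequence number and the high 32 bits as the reset count.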
static std::string string_UsageTag(const ResourceUsageTag &tag) {
    std::stringstream out;
    out << "(command " << CommandTypeString(tag.command) << ", seq #" << (tag.index & 0xFFFFFFFF) << ", reset #"
        << (tag.index >> 32) << ")";
    return out.str();
}

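// Execution and access scopes used to model the implicit rasterization-order guarantees for color and
// depth/stencil attachment accesses within a subpass.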
static constexpr VkPipelineStageFlags kColorAttachmentExecScope = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
static constexpr SyncStageAccessFlags kColorAttachmentAccessScope =
    SyncStageAccessFlagBits::SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ_BIT |
    SyncStageAccessFlagBits::SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT |
    SyncStageAccessFlagBits::SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE_BIT;
static constexpr VkPipelineStageFlags kDepthStencilAttachmentExecScope =
    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
static constexpr SyncStageAccessFlags kDepthStencilAttachmentAccessScope =
    SyncStageAccessFlagBits::SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
    SyncStageAccessFlagBits::SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
    SyncStageAccessFlagBits::SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
    SyncStageAccessFlagBits::SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;

static constexpr SyncOrderingBarrier kColorAttachmentRasterOrder = {kColorAttachmentExecScope, kColorAttachmentAccessScope};
static constexpr SyncOrderingBarrier kDepthStencilAttachmentRasterOrder = {kDepthStencilAttachmentExecScope,
                                                                           kDepthStencilAttachmentAccessScope};
static constexpr SyncOrderingBarrier kAttachmentRasterOrder = {kDepthStencilAttachmentExecScope | kColorAttachmentExecScope,
                                                               kDepthStencilAttachmentAccessScope | kColorAttachmentAccessScope};
// Sometimes we have an internal access conflict, and we use kCurrentCommandTag to set and detect it in temporary/proxy contexts
static const ResourceUsageTag kCurrentCommandTag(ResourceUsageTag::kMaxIndex, CMD_NONE);

inline VkDeviceSize GetRealWholeSize(VkDeviceSize offset, VkDeviceSize size, VkDeviceSize whole_size) {
    if (size == VK_WHOLE_SIZE) {
        return (whole_size - offset);
    }
    return size;
}

template <typename T>
static ResourceAccessRange MakeRange(const T &has_offset_and_size) {
    return ResourceAccessRange(has_offset_and_size.offset, (has_offset_and_size.offset + has_offset_and_size.size));
}

static ResourceAccessRange MakeRange(VkDeviceSize start, VkDeviceSize size) { return ResourceAccessRange(start, (start + size)); }

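// Note: ResourceAccessRange is a half-open [begin, end) interval. As an illustrative example (hypothetical values),
// a 256-byte region at offset 64 of a 320-byte buffer gives GetRealWholeSize(64, VK_WHOLE_SIZE, 320) == 256 and
// MakeRange(64, 256) covering [64, 320).
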
// Expand the pipeline stages without regard to whether they are valid w.r.t. queue or extension
VkPipelineStageFlags ExpandPipelineStages(VkQueueFlags queue_flags, VkPipelineStageFlags stage_mask) {
    VkPipelineStageFlags expanded = stage_mask;
    if (VK_PIPELINE_STAGE_ALL_COMMANDS_BIT & stage_mask) {
        expanded = expanded & ~VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
        for (const auto &all_commands : syncAllCommandStagesByQueueFlags) {
            if (all_commands.first & queue_flags) {
                expanded |= all_commands.second;
            }
        }
    }
    if (VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT & stage_mask) {
        expanded = expanded & ~VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
        expanded |= syncAllCommandStagesByQueueFlags.at(VK_QUEUE_GRAPHICS_BIT) & ~VK_PIPELINE_STAGE_HOST_BIT;
    }
    return expanded;
}

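// RelatedPipelineStages ORs in every stage related (per the supplied map) to a stage present in stage_mask;
// WithEarlierPipelineStages / WithLaterPipelineStages use it to extend an execution scope to the logically
// earlier / later stages, respectively.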
VkPipelineStageFlags RelatedPipelineStages(VkPipelineStageFlags stage_mask,
                                           std::map<VkPipelineStageFlagBits, VkPipelineStageFlags> &map) {
    VkPipelineStageFlags unscanned = stage_mask;
    VkPipelineStageFlags related = 0;
    for (const auto entry : map) {
        const auto stage = entry.first;
        if (stage & unscanned) {
            related = related | entry.second;
            unscanned = unscanned & ~stage;
            if (!unscanned) break;
        }
    }
    return related;
}

VkPipelineStageFlags WithEarlierPipelineStages(VkPipelineStageFlags stage_mask) {
    return stage_mask | RelatedPipelineStages(stage_mask, syncLogicallyEarlierStages);
}

VkPipelineStageFlags WithLaterPipelineStages(VkPipelineStageFlags stage_mask) {
    return stage_mask | RelatedPipelineStages(stage_mask, syncLogicallyLaterStages);
}

static const ResourceAccessRange full_range(std::numeric_limits<VkDeviceSize>::min(), std::numeric_limits<VkDeviceSize>::max());

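// Computes the buffer range touched by an indexed access: range_start = offset + first_index * stride, and
// range_size = count * stride, with count == UINT32_MAX meaning "through the end of the buffer".
// Illustrative example (hypothetical values): offset 0, first_index 2, count 3, stride 16 -> start 32, size 48.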
void GetBufferRange(VkDeviceSize &range_start, VkDeviceSize &range_size, VkDeviceSize offset, VkDeviceSize buf_whole_size,
                    uint32_t first_index, uint32_t count, VkDeviceSize stride) {
    range_start = offset + first_index * stride;
    range_size = 0;
    if (count == UINT32_MAX) {
        range_size = buf_whole_size - range_start;
    } else {
        range_size = count * stride;
    }
}

SyncStageAccessIndex GetSyncStageAccessIndexsByDescriptorSet(VkDescriptorType descriptor_type, const interface_var &descriptor_data,
                                                             VkShaderStageFlagBits stage_flag) {
    if (descriptor_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) {
        assert(stage_flag == VK_SHADER_STAGE_FRAGMENT_BIT);
        return SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ;
    }
    auto stage_access = syncStageAccessMaskByShaderStage.find(stage_flag);
    if (stage_access == syncStageAccessMaskByShaderStage.end()) {
        assert(0);
    }
    if (descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
        return stage_access->second.uniform_read;
    }

    // If the descriptor set is writable, we don't need to care about SHADER_READ; SHADER_WRITE is enough.
    // If a write hazard happens, a read hazard may or may not happen.
    // But if no write hazard happens, a read hazard cannot happen.
    if (descriptor_data.is_writable) {
        return stage_access->second.shader_write;
    }
    return stage_access->second.shader_read;
}

bool IsImageLayoutDepthWritable(VkImageLayout image_layout) {
    return (image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
            image_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
            image_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL)
               ? true
               : false;
}

bool IsImageLayoutStencilWritable(VkImageLayout image_layout) {
    return (image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
            image_layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL ||
            image_layout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL)
               ? true
               : false;
}

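// Accesses are tracked in two address spaces (see kAddressTypes below): a linear space, where buffers and
// linearly laid out images are addressed by offsets from a synthetic base address, and an idealized space for
// optimally tiled images, addressed through their subresource encoding.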
// Class AccessContext stores the state of accesses specific to a Command, Subpass, or Queue
const std::array<AccessContext::AddressType, AccessContext::kAddressTypeCount> AccessContext::kAddressTypes = {
    AccessContext::AddressType::kLinearAddress, AccessContext::AddressType::kIdealizedAddress};

// Traverse the attachment resolves for a specific subpass, and apply action() to them.
// Used by both validation and record operations
//
// The signature for Action() reflects the needs of both uses.
template <typename Action>
void ResolveOperation(Action &action, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                      const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass) {
    VkExtent3D extent = CastTo3D(render_area.extent);
    VkOffset3D offset = CastTo3D(render_area.offset);
    const auto &rp_ci = rp_state.createInfo;
    const auto *attachment_ci = rp_ci.pAttachments;
    const auto &subpass_ci = rp_ci.pSubpasses[subpass];

    // Color resolves -- require an inuse color attachment and a matching inuse resolve attachment
    const auto *color_attachments = subpass_ci.pColorAttachments;
    const auto *color_resolve = subpass_ci.pResolveAttachments;
    if (color_resolve && color_attachments) {
        for (uint32_t i = 0; i < subpass_ci.colorAttachmentCount; i++) {
            const auto &color_attach = color_attachments[i].attachment;
            const auto &resolve_attach = subpass_ci.pResolveAttachments[i].attachment;
            if ((color_attach != VK_ATTACHMENT_UNUSED) && (resolve_attach != VK_ATTACHMENT_UNUSED)) {
                action("color", "resolve read", color_attach, resolve_attach, attachment_views[color_attach],
                       SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ, kColorAttachmentRasterOrder, offset, extent, 0);
                action("color", "resolve write", color_attach, resolve_attach, attachment_views[resolve_attach],
                       SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, kColorAttachmentRasterOrder, offset, extent, 0);
            }
        }
    }

    // Depth stencil resolve only if the extension is present
    const auto ds_resolve = lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolve>(subpass_ci.pNext);
    if (ds_resolve && ds_resolve->pDepthStencilResolveAttachment &&
        (ds_resolve->pDepthStencilResolveAttachment->attachment != VK_ATTACHMENT_UNUSED) && subpass_ci.pDepthStencilAttachment &&
        (subpass_ci.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)) {
        const auto src_at = subpass_ci.pDepthStencilAttachment->attachment;
        const auto src_ci = attachment_ci[src_at];
        // The formats are required to match so we can pick either
        const bool resolve_depth = (ds_resolve->depthResolveMode != VK_RESOLVE_MODE_NONE) && FormatHasDepth(src_ci.format);
        const bool resolve_stencil = (ds_resolve->stencilResolveMode != VK_RESOLVE_MODE_NONE) && FormatHasStencil(src_ci.format);
        const auto dst_at = ds_resolve->pDepthStencilResolveAttachment->attachment;
        VkImageAspectFlags aspect_mask = 0u;

        // Figure out which aspects are actually touched during resolve operations
        const char *aspect_string = nullptr;
        if (resolve_depth && resolve_stencil) {
            // Validate all aspects together
            aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
            aspect_string = "depth/stencil";
        } else if (resolve_depth) {
            // Validate depth only
            aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT;
            aspect_string = "depth";
        } else if (resolve_stencil) {
            // Validate stencil only
            aspect_mask = VK_IMAGE_ASPECT_STENCIL_BIT;
            aspect_string = "stencil";
        }

        if (aspect_mask) {
            action(aspect_string, "resolve read", src_at, dst_at, attachment_views[src_at],
                   SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ, kDepthStencilAttachmentRasterOrder, offset, extent,
                   aspect_mask);
            action(aspect_string, "resolve write", src_at, dst_at, attachment_views[dst_at],
                   SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, kAttachmentRasterOrder, offset, extent, aspect_mask);
        }
    }
}

// Action for validating resolve operations
class ValidateResolveAction {
  public:
    ValidateResolveAction(VkRenderPass render_pass, uint32_t subpass, const AccessContext &context, const SyncValidator &sync_state,
                          const char *func_name)
        : render_pass_(render_pass),
          subpass_(subpass),
          context_(context),
          sync_state_(sync_state),
          func_name_(func_name),
          skip_(false) {}
    void operator()(const char *aspect_name, const char *attachment_name, uint32_t src_at, uint32_t dst_at,
                    const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
                    const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask) {
        HazardResult hazard;
        hazard = context_.DetectHazard(view, current_usage, ordering, offset, extent, aspect_mask);
        if (hazard.hazard) {
            skip_ |= sync_state_.LogError(render_pass_, string_SyncHazardVUID(hazard.hazard),
                                          "%s: Hazard %s in subpass %" PRIu32 " during %s %s, from attachment %" PRIu32
                                          " to resolve attachment %" PRIu32 ". Prior access %s.",
                                          func_name_, string_SyncHazard(hazard.hazard), subpass_, aspect_name, attachment_name,
                                          src_at, dst_at, string_UsageTag(hazard.tag).c_str());
        }
    }
    // Providing a mechanism for the constructing caller to get the result of the validation
    bool GetSkip() const { return skip_; }

  private:
    VkRenderPass render_pass_;
    const uint32_t subpass_;
    const AccessContext &context_;
    const SyncValidator &sync_state_;
    const char *func_name_;
    bool skip_;
};

// Update action for resolve operations
class UpdateStateResolveAction {
  public:
    UpdateStateResolveAction(AccessContext &context, const ResourceUsageTag &tag) : context_(context), tag_(tag) {}
    void operator()(const char *aspect_name, const char *attachment_name, uint32_t src_at, uint32_t dst_at,
                    const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
                    const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask) {
        // Ignores validation only arguments...
        context_.UpdateAccessState(view, current_usage, offset, extent, aspect_mask, tag_);
    }

  private:
    AccessContext &context_;
    const ResourceUsageTag &tag_;
};

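// Build the access context for a subpass: wire up previous-subpass dependencies (prev_), asynchronous
// (unsynchronized) subpasses (async_), and the barriers into and out of the render pass (src/dst external).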
AccessContext::AccessContext(uint32_t subpass, VkQueueFlags queue_flags,
                             const std::vector<SubpassDependencyGraphNode> &dependencies,
                             const std::vector<AccessContext> &contexts, AccessContext *external_context) {
    Reset();
    const auto &subpass_dep = dependencies[subpass];
    prev_.reserve(subpass_dep.prev.size());
    prev_by_subpass_.resize(subpass, nullptr);  // Can't be more prevs than the subpass we're on
    for (const auto &prev_dep : subpass_dep.prev) {
        assert(prev_dep.dependency);
        const auto dep = *prev_dep.dependency;
        prev_.emplace_back(const_cast<AccessContext *>(&contexts[dep.srcSubpass]), queue_flags, dep);
        prev_by_subpass_[dep.srcSubpass] = &prev_.back();
    }

    async_.reserve(subpass_dep.async.size());
    for (const auto async_subpass : subpass_dep.async) {
        async_.emplace_back(const_cast<AccessContext *>(&contexts[async_subpass]));
    }
    if (subpass_dep.barrier_from_external) {
        src_external_ = TrackBack(external_context, queue_flags, *subpass_dep.barrier_from_external);
    } else {
        src_external_ = TrackBack();
    }
    if (subpass_dep.barrier_to_external) {
        dst_external_ = TrackBack(this, queue_flags, *subpass_dep.barrier_to_external);
    } else {
        dst_external_ = TrackBack();
    }
}

template <typename Detector>
HazardResult AccessContext::DetectPreviousHazard(AddressType type, const Detector &detector,
                                                 const ResourceAccessRange &range) const {
    ResourceAccessRangeMap descent_map;
    ResolvePreviousAccess(type, range, &descent_map, nullptr);

    HazardResult hazard;
    for (auto prev = descent_map.begin(); prev != descent_map.end() && !hazard.hazard; ++prev) {
        hazard = detector.Detect(prev);
    }
    return hazard;
}

// A recursive range walker for hazard detection, first for the current context and then (via DetectPreviousHazard) to walk
// the DAG of the contexts (for example subpasses)
template <typename Detector>
HazardResult AccessContext::DetectHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range,
                                         DetectOptions options) const {
    HazardResult hazard;

    if (static_cast<uint32_t>(options) & DetectOptions::kDetectAsync) {
        // Async checks don't require recursive lookups, as the async lists are exhaustive for the top-level context
        // so we'll check these first
        for (const auto &async_context : async_) {
            hazard = async_context->DetectAsyncHazard(type, detector, range);
            if (hazard.hazard) return hazard;
        }
    }

    const bool detect_prev = (static_cast<uint32_t>(options) & DetectOptions::kDetectPrevious) != 0;

    const auto &accesses = GetAccessStateMap(type);
    const auto from = accesses.lower_bound(range);
    const auto to = accesses.upper_bound(range);
    ResourceAccessRange gap = {range.begin, range.begin};

    for (auto pos = from; pos != to; ++pos) {
        // Cover any leading gap, or gap between entries
        if (detect_prev) {
            // TODO: After profiling we may want to change the descent logic such that we don't recur per gap...
            // Cover any leading gap, or gap between entries
            gap.end = pos->first.begin;  // We know this begin is < range.end
            if (gap.non_empty()) {
                // Recur on all gaps
                hazard = DetectPreviousHazard(type, detector, gap);
                if (hazard.hazard) return hazard;
            }
            // Set up for the next gap. If pos..end is >= range.end, loop will exit, and trailing gap will be empty
            gap.begin = pos->first.end;
        }

        hazard = detector.Detect(pos);
        if (hazard.hazard) return hazard;
    }

    if (detect_prev) {
        // Detect in the trailing empty as needed
        gap.end = range.end;
        if (gap.non_empty()) {
            hazard = DetectPreviousHazard(type, detector, gap);
        }
    }

    return hazard;
}

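// Note on DetectOptions as used above: the options value is treated as a bitmask -- kDetectAsync enables the pass
// over the async_ contexts, and kDetectPrevious enables descent into previous contexts wherever gaps are found.
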
// A non recursive range walker for the asynchronous contexts (those we have no barriers with)
template <typename Detector>
HazardResult AccessContext::DetectAsyncHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const {
    auto &accesses = GetAccessStateMap(type);
    const auto from = accesses.lower_bound(range);
    const auto to = accesses.upper_bound(range);

    HazardResult hazard;
    for (auto pos = from; pos != to && !hazard.hazard; ++pos) {
        hazard = detector.DetectAsync(pos);
    }

    return hazard;
}

// Returns the last resolved entry
static void ResolveMapToEntry(ResourceAccessRangeMap *dest, ResourceAccessRangeMap::iterator entry,
                              ResourceAccessRangeMap::const_iterator first, ResourceAccessRangeMap::const_iterator last,
                              const SyncBarrier *barrier) {
    auto at = entry;
    for (auto pos = first; pos != last; ++pos) {
        // Every member of the input iterator range must fit within the remaining portion of entry
        assert(at->first.includes(pos->first));
        assert(at != dest->end());
        // Trim up at to the same size as the entry to resolve
        at = sparse_container::split(at, *dest, pos->first);
        auto access = pos->second;
        if (barrier) {
            access.ApplyBarrier(*barrier);
        }
        at->second.Resolve(access);
        ++at;  // Go to the remaining unused section of entry
    }
}

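// Merge this context's accesses over 'range' into *resolve_map, optionally applying 'barrier' to the merged-in
// state. Gaps are either filled by descending into previous contexts (recur_to_infill) or with *infill_state.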
void AccessContext::ResolveAccessRange(AddressType type, const ResourceAccessRange &range, const SyncBarrier *barrier,
                                       ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
                                       bool recur_to_infill) const {
    if (!range.non_empty()) return;

    ResourceRangeMergeIterator current(*resolve_map, GetAccessStateMap(type), range.begin);
    while (current->range.non_empty() && range.includes(current->range.begin)) {
        const auto current_range = current->range & range;
        if (current->pos_B->valid) {
            const auto &src_pos = current->pos_B->lower_bound;
            auto access = src_pos->second;
            if (barrier) {
                access.ApplyBarrier(*barrier);
            }
            if (current->pos_A->valid) {
                const auto trimmed = sparse_container::split(current->pos_A->lower_bound, *resolve_map, current_range);
                trimmed->second.Resolve(access);
                current.invalidate_A(trimmed);
            } else {
                auto inserted = resolve_map->insert(current->pos_A->lower_bound, std::make_pair(current_range, access));
                current.invalidate_A(inserted);  // Update the parallel iterator to point at the insert segment
            }
        } else {
            // we have to descend to fill this gap
            if (recur_to_infill) {
                if (current->pos_A->valid) {
                    // Dest is valid, so we need to accumulate along the DAG and then resolve... in an N-to-1 resolve operation
                    ResourceAccessRangeMap gap_map;
                    ResolvePreviousAccess(type, current_range, &gap_map, infill_state);
                    ResolveMapToEntry(resolve_map, current->pos_A->lower_bound, gap_map.begin(), gap_map.end(), barrier);
                } else {
                    // There isn't anything in dest in current_range, so we can accumulate directly into it.
                    ResolvePreviousAccess(type, current_range, resolve_map, infill_state);
                    if (barrier) {
                        // Need to apply the barrier to the accesses we accumulated, noting that we haven't updated current
                        for (auto pos = resolve_map->lower_bound(current_range); pos != current->pos_A->lower_bound; ++pos) {
                            pos->second.ApplyBarrier(*barrier);
                        }
                    }
                }
                // Given that there could be gaps we need to seek carefully to not repeatedly search the same gaps in the next
                // iterator of the outer while.

                // Set the parallel iterator to the end of this range s.t. ++ will move us to the next range whether or
                // not the end of the range is a gap. For the seek to work, first we need to warn the parallel iterator
                // we stepped on the dest map
                const auto seek_to = current_range.end - 1;  // The subtraction is safe as range can't be empty (loop condition)
                current.invalidate_A();                      // Changes current->range
                current.seek(seek_to);
            } else if (!current->pos_A->valid && infill_state) {
                // If we didn't find anything in the current range, and we aren't recurring... we infill if required
                auto inserted = resolve_map->insert(current->pos_A->lower_bound, std::make_pair(current->range, *infill_state));
                current.invalidate_A(inserted);  // Update the parallel iterator to point at the correct segment after insert
            }
        }
        ++current;
    }
}

void AccessContext::ResolvePreviousAccess(AddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
                                          const ResourceAccessState *infill_state) const {
    if ((prev_.size() == 0) && (src_external_.context == nullptr)) {
        if (range.non_empty() && infill_state) {
            descent_map->insert(std::make_pair(range, *infill_state));
        }
    } else {
        // Look for something to fill the gap further along.
        for (const auto &prev_dep : prev_) {
            prev_dep.context->ResolveAccessRange(type, range, &prev_dep.barrier, descent_map, infill_state);
        }

        if (src_external_.context) {
            src_external_.context->ResolveAccessRange(type, range, &src_external_.barrier, descent_map, infill_state);
        }
    }
}

AccessContext::AddressType AccessContext::ImageAddressType(const IMAGE_STATE &image) {
    return (image.fragment_encoder->IsLinearImage()) ? AddressType::kLinearAddress : AddressType::kIdealizedAddress;
}

VkDeviceSize AccessContext::ResourceBaseAddress(const BINDABLE &bindable) {
    return bindable.binding.offset + bindable.binding.mem_state->fake_base_address;
}

static bool SimpleBinding(const BINDABLE &bindable) { return !bindable.sparse && bindable.binding.mem_state; }

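// Map attachment load ops to the stage/access they behave as: LOAD_OP_LOAD is a read of the attachment, while
// CLEAR and DONT_CARE are treated as writes.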
static SyncStageAccessIndex ColorLoadUsage(VkAttachmentLoadOp load_op) {
    const auto stage_access = (load_op == VK_ATTACHMENT_LOAD_OP_LOAD) ? SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ
                                                                      : SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE;
    return stage_access;
}
static SyncStageAccessIndex DepthStencilLoadUsage(VkAttachmentLoadOp load_op) {
    const auto stage_access = (load_op == VK_ATTACHMENT_LOAD_OP_LOAD) ? SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ
                                                                      : SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE;
    return stage_access;
}

// Caller must manage returned pointer
static AccessContext *CreateStoreResolveProxyContext(const AccessContext &context, const RENDER_PASS_STATE &rp_state,
                                                     uint32_t subpass, const VkRect2D &render_area,
                                                     std::vector<const IMAGE_VIEW_STATE *> attachment_views) {
    auto *proxy = new AccessContext(context);
    proxy->UpdateAttachmentResolveAccess(rp_state, render_area, attachment_views, subpass, kCurrentCommandTag);
    proxy->UpdateAttachmentStoreAccess(rp_state, render_area, attachment_views, subpass, kCurrentCommandTag);
    return proxy;
}

void AccessContext::ResolvePreviousAccess(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range_arg,
                                          AddressType address_type, ResourceAccessRangeMap *descent_map,
                                          const ResourceAccessState *infill_state) const {
    if (!SimpleBinding(image_state)) return;

    auto subresource_range = NormalizeSubresourceRange(image_state.createInfo, subresource_range_arg);
    subresource_adapter::ImageRangeGenerator range_gen(*image_state.fragment_encoder.get(), subresource_range, {0, 0, 0},
                                                       image_state.createInfo.extent);
    const auto base_address = ResourceBaseAddress(image_state);
    for (; range_gen->non_empty(); ++range_gen) {
        ResolvePreviousAccess(address_type, (*range_gen + base_address), descent_map, infill_state);
    }
}

// Layout transitions are handled as if they were occurring at the beginning of the next subpass
bool AccessContext::ValidateLayoutTransitions(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                              const VkRect2D &render_area, uint32_t subpass,
                                              const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                              const char *func_name) const {
    bool skip = false;
    // As validation methods are const and precede the record/update phase, for any transitions from the immediately
    // previous subpass, we have to validate them against a copy of the AccessContext, with resolve operations applied, as
    // those effects have not been recorded yet.
    //
    // Note: we could be more efficient by tracking whether or not we actually *have* any changes (e.g. attachment resolve)
    // to apply and only copy then, if this proves a hot spot.
    std::unique_ptr<AccessContext> proxy_for_prev;
    TrackBack proxy_track_back;

    const auto &transitions = rp_state.subpass_transitions[subpass];
    for (const auto &transition : transitions) {
        const bool prev_needs_proxy = transition.prev_pass != VK_SUBPASS_EXTERNAL && (transition.prev_pass + 1 == subpass);

        const auto *track_back = GetTrackBackFromSubpass(transition.prev_pass);
        if (prev_needs_proxy) {
            if (!proxy_for_prev) {
                proxy_for_prev.reset(CreateStoreResolveProxyContext(*track_back->context, rp_state, transition.prev_pass,
                                                                    render_area, attachment_views));
                proxy_track_back = *track_back;
                proxy_track_back.context = proxy_for_prev.get();
            }
            track_back = &proxy_track_back;
        }
        auto hazard = DetectSubpassTransitionHazard(*track_back, attachment_views[transition.attachment]);
        if (hazard.hazard) {
            skip |= sync_state.LogError(
                rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
                "%s: Hazard %s in subpass %" PRIu32 " for attachment %" PRIu32 " image layout transition. Prior access %s.",
                func_name, string_SyncHazard(hazard.hazard), subpass, transition.attachment, string_UsageTag(hazard.tag).c_str());
        }
    }
    return skip;
}

bool AccessContext::ValidateLoadOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                          const VkRect2D &render_area, uint32_t subpass,
                                          const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                          const char *func_name) const {
    bool skip = false;
    const auto *attachment_ci = rp_state.createInfo.pAttachments;
    VkExtent3D extent = CastTo3D(render_area.extent);
    VkOffset3D offset = CastTo3D(render_area.offset);
    const auto external_access_scope = src_external_.barrier.dst_access_scope;

    for (uint32_t i = 0; i < rp_state.createInfo.attachmentCount; i++) {
        if (subpass == rp_state.attachment_first_subpass[i]) {
            if (attachment_views[i] == nullptr) continue;
            const IMAGE_VIEW_STATE &view = *attachment_views[i];
            const IMAGE_STATE *image = view.image_state.get();
            if (image == nullptr) continue;
            const auto &ci = attachment_ci[i];
            const bool is_transition = rp_state.attachment_first_is_transition[i];

            // Need to check in the following way:
            // 1) if the usage bit isn't in the dest_access_scope, and there is a layout transition for initial use, report
            //    hazard vs. transition
            // 2) if there isn't a layout transition, we need to look at the external context with a "detect hazard" operation
            //    for each aspect loaded.

            const bool has_depth = FormatHasDepth(ci.format);
            const bool has_stencil = FormatHasStencil(ci.format);
            const bool is_color = !(has_depth || has_stencil);

            const SyncStageAccessIndex load_index = has_depth ? DepthStencilLoadUsage(ci.loadOp) : ColorLoadUsage(ci.loadOp);
            const SyncStageAccessFlags load_mask = (has_depth || is_color) ? SyncStageAccess::Flags(load_index) : 0U;
            const SyncStageAccessIndex stencil_load_index = has_stencil ? DepthStencilLoadUsage(ci.stencilLoadOp) : load_index;
            const SyncStageAccessFlags stencil_mask = has_stencil ? SyncStageAccess::Flags(stencil_load_index) : 0U;

            HazardResult hazard;
            const char *aspect = nullptr;
            if (is_transition) {
                // For the transition case, check the load usage against the access scope of the incoming barrier
                SyncHazard transition_hazard = SyncHazard::NONE;
                bool checked_stencil = false;
                if (load_mask) {
                    if ((load_mask & external_access_scope) != load_mask) {
                        transition_hazard =
                            SyncStageAccess::HasWrite(load_mask) ? SyncHazard::WRITE_AFTER_WRITE : SyncHazard::READ_AFTER_WRITE;
                        aspect = is_color ? "color" : "depth";
                    }
                    if (!transition_hazard && stencil_mask) {
                        if ((stencil_mask & external_access_scope) != stencil_mask) {
                            transition_hazard = SyncStageAccess::HasWrite(stencil_mask) ? SyncHazard::WRITE_AFTER_WRITE
                                                                                        : SyncHazard::READ_AFTER_WRITE;
                            aspect = "stencil";
                            checked_stencil = true;
                        }
                    }
                }
                if (transition_hazard) {
                    // Hazard vs. ILT
                    auto load_op_string = string_VkAttachmentLoadOp(checked_stencil ? ci.stencilLoadOp : ci.loadOp);
                    skip |=
                        sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(transition_hazard),
                                            "%s: Hazard %s vs. layout transition in subpass %" PRIu32 " for attachment %" PRIu32
                                            " aspect %s during load with loadOp %s.",
                                            func_name, string_SyncHazard(transition_hazard), subpass, i, aspect, load_op_string);
                }
            } else {
                auto hazard_range = view.normalized_subresource_range;
                bool checked_stencil = false;
                if (is_color) {
                    hazard = DetectHazard(*image, load_index, view.normalized_subresource_range, offset, extent);
                    aspect = "color";
                } else {
                    if (has_depth) {
                        hazard_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
                        hazard = DetectHazard(*image, load_index, hazard_range, offset, extent);
                        aspect = "depth";
                    }
                    if (!hazard.hazard && has_stencil) {
                        hazard_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
                        hazard = DetectHazard(*image, stencil_load_index, hazard_range, offset, extent);
                        aspect = "stencil";
                        checked_stencil = true;
                    }
                }

                if (hazard.hazard) {
                    auto load_op_string = string_VkAttachmentLoadOp(checked_stencil ? ci.stencilLoadOp : ci.loadOp);
                    skip |= sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
                                                "%s: Hazard %s in subpass %" PRIu32 " for attachment %" PRIu32
                                                " aspect %s during load with loadOp %s.",
                                                func_name, string_SyncHazard(hazard.hazard), subpass, i, aspect, load_op_string);
                }
            }
        }
    }
    return skip;
}

// Store operation validation can ignore resolve (before it) and layout transitions (after it). The first is ignored
// because of the ordering guarantees w.r.t. sample access, and because the resolve validation hasn't altered the state, as the
// store is part of the same Next/End operation.
// The latter is handled in layout transition validation directly
bool AccessContext::ValidateStoreOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                           const VkRect2D &render_area, uint32_t subpass,
                                           const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                           const char *func_name) const {
    bool skip = false;
    const auto *attachment_ci = rp_state.createInfo.pAttachments;
    VkExtent3D extent = CastTo3D(render_area.extent);
    VkOffset3D offset = CastTo3D(render_area.offset);

    for (uint32_t i = 0; i < rp_state.createInfo.attachmentCount; i++) {
        if (subpass == rp_state.attachment_last_subpass[i]) {
            if (attachment_views[i] == nullptr) continue;
            const IMAGE_VIEW_STATE &view = *attachment_views[i];
            const IMAGE_STATE *image = view.image_state.get();
            if (image == nullptr) continue;
            const auto &ci = attachment_ci[i];

            // The spec states that "don't care" is an operation with VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
            // so we assume that an implementation is *free* to write in that case, meaning that for correctness'
            // sake, we treat DONT_CARE as writing.
            const bool has_depth = FormatHasDepth(ci.format);
            const bool has_stencil = FormatHasStencil(ci.format);
            const bool is_color = !(has_depth || has_stencil);
            const bool store_op_stores = ci.storeOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
            if (!has_stencil && !store_op_stores) continue;

            HazardResult hazard;
            const char *aspect = nullptr;
            bool checked_stencil = false;
            if (is_color) {
                hazard = DetectHazard(*image, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE,
                                      view.normalized_subresource_range, kAttachmentRasterOrder, offset, extent);
                aspect = "color";
            } else {
                const bool stencil_op_stores = ci.stencilStoreOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
                auto hazard_range = view.normalized_subresource_range;
                if (has_depth && store_op_stores) {
                    hazard_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
                    hazard = DetectHazard(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, hazard_range,
                                          kAttachmentRasterOrder, offset, extent);
                    aspect = "depth";
                }
                if (!hazard.hazard && has_stencil && stencil_op_stores) {
                    hazard_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
                    hazard = DetectHazard(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, hazard_range,
                                          kAttachmentRasterOrder, offset, extent);
                    aspect = "stencil";
                    checked_stencil = true;
                }
            }

            if (hazard.hazard) {
                const char *const op_type_string = checked_stencil ? "stencilStoreOp" : "storeOp";
                const char *const store_op_string = string_VkAttachmentStoreOp(checked_stencil ? ci.stencilStoreOp : ci.storeOp);
                skip |= sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
                                            "%s: Hazard %s in subpass %" PRIu32 " for attachment %" PRIu32
                                            " %s aspect during store with %s %s. Prior access %s",
                                            func_name, string_SyncHazard(hazard.hazard), subpass, i, aspect, op_type_string,
                                            store_op_string, string_UsageTag(hazard.tag).c_str());
            }
        }
    }
    return skip;
}

bool AccessContext::ValidateResolveOperations(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                              const VkRect2D &render_area,
                                              const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
                                              uint32_t subpass) const {
    ValidateResolveAction validate_action(rp_state.renderPass, subpass, *this, sync_state, func_name);
    ResolveOperation(validate_action, rp_state, render_area, attachment_views, subpass);
    return validate_action.GetSkip();
}

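// Detector functors consumed by the range walkers above: Detect() performs the ordinary hazard check against
// recorded state, while DetectAsync() checks for races against contexts with which there are no barriers.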
class HazardDetector {
    SyncStageAccessIndex usage_index_;

  public:
    HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const { return pos->second.DetectHazard(usage_index_); }
    HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos) const {
        return pos->second.DetectAsyncHazard(usage_index_);
    }
    HazardDetector(SyncStageAccessIndex usage) : usage_index_(usage) {}
};

class HazardDetectorWithOrdering {
    const SyncStageAccessIndex usage_index_;
    const SyncOrderingBarrier &ordering_;

  public:
    HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const {
        return pos->second.DetectHazard(usage_index_, ordering_);
    }
    HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos) const {
        return pos->second.DetectAsyncHazard(usage_index_);
    }
    HazardDetectorWithOrdering(SyncStageAccessIndex usage, const SyncOrderingBarrier &ordering)
        : usage_index_(usage), ordering_(ordering) {}
};

HazardResult AccessContext::DetectHazard(AddressType type, SyncStageAccessIndex usage_index,
                                         const ResourceAccessRange &range) const {
    HazardDetector detector(usage_index);
    return DetectHazard(type, detector, range, DetectOptions::kDetectAll);
}

HazardResult AccessContext::DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index,
                                         const ResourceAccessRange &range) const {
    if (!SimpleBinding(buffer)) return HazardResult();
    return DetectHazard(AddressType::kLinearAddress, usage_index, range + ResourceBaseAddress(buffer));
}

template <typename Detector>
HazardResult AccessContext::DetectHazard(Detector &detector, const IMAGE_STATE &image,
                                         const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
                                         const VkExtent3D &extent, DetectOptions options) const {
    if (!SimpleBinding(image)) return HazardResult();
    subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, offset, extent);
    const auto address_type = ImageAddressType(image);
    const auto base_address = ResourceBaseAddress(image);
    for (; range_gen->non_empty(); ++range_gen) {
        HazardResult hazard = DetectHazard(address_type, detector, (*range_gen + base_address), options);
        if (hazard.hazard) return hazard;
    }
    return HazardResult();
}

HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                                         const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                                         const VkExtent3D &extent) const {
    VkImageSubresourceRange subresource_range = {subresource.aspectMask, subresource.mipLevel, 1, subresource.baseArrayLayer,
                                                 subresource.layerCount};
    return DetectHazard(image, current_usage, subresource_range, offset, extent);
}

HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                                         const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
                                         const VkExtent3D &extent) const {
    HazardDetector detector(current_usage);
    return DetectHazard(detector, image, subresource_range, offset, extent, DetectOptions::kDetectAll);
}

HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                                         const VkImageSubresourceRange &subresource_range, const SyncOrderingBarrier &ordering,
                                         const VkOffset3D &offset, const VkExtent3D &extent) const {
    HazardDetectorWithOrdering detector(current_usage, ordering);
    return DetectHazard(detector, image, subresource_range, offset, extent, DetectOptions::kDetectAll);
}

// Some common code for looking at attachments; if there's anything wrong we return no hazard, as core validation
// should have reported the issue regarding an invalid attachment entry
HazardResult AccessContext::DetectHazard(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage,
                                         const SyncOrderingBarrier &ordering, const VkOffset3D &offset, const VkExtent3D &extent,
                                         VkImageAspectFlags aspect_mask) const {
    if (view != nullptr) {
        const IMAGE_STATE *image = view->image_state.get();
        if (image != nullptr) {
            auto *detect_range = &view->normalized_subresource_range;
            VkImageSubresourceRange masked_range;
            if (aspect_mask) {  // If present and non-zero, restrict the normalized range to aspects present in aspect_mask
                masked_range = view->normalized_subresource_range;
                masked_range.aspectMask = aspect_mask & masked_range.aspectMask;
                detect_range = &masked_range;
            }

            // NOTE: The range encoding code is not robust to invalid ranges, so we protect it from our change
            if (detect_range->aspectMask) {
                return DetectHazard(*image, current_usage, *detect_range, ordering, offset, extent);
            }
        }
    }
    return HazardResult();
}
class BarrierHazardDetector {
  public:
    BarrierHazardDetector(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
                          SyncStageAccessFlags src_access_scope)
        : usage_index_(usage_index), src_exec_scope_(src_exec_scope), src_access_scope_(src_access_scope) {}

    HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const {
        return pos->second.DetectBarrierHazard(usage_index_, src_exec_scope_, src_access_scope_);
    }
    HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos) const {
        // Async barrier hazard detection can use the same path as the usage index is not IsRead, but is IsWrite
        return pos->second.DetectAsyncHazard(usage_index_);
    }

  private:
    SyncStageAccessIndex usage_index_;
    VkPipelineStageFlags src_exec_scope_;
    SyncStageAccessFlags src_access_scope_;
};

HazardResult AccessContext::DetectBarrierHazard(AddressType type, SyncStageAccessIndex current_usage,
                                                VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                                                const ResourceAccessRange &range, DetectOptions options) const {
    BarrierHazardDetector detector(current_usage, src_exec_scope, src_access_scope);
    return DetectHazard(type, detector, range, options);
}

HazardResult AccessContext::DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                                     SyncStageAccessFlags src_access_scope,
                                                     const VkImageSubresourceRange &subresource_range,
                                                     DetectOptions options) const {
    BarrierHazardDetector detector(SyncStageAccessIndex::SYNC_IMAGE_LAYOUT_TRANSITION, src_exec_scope, src_access_scope);
    VkOffset3D zero_offset = {0, 0, 0};
    return DetectHazard(detector, image, subresource_range, zero_offset, image.createInfo.extent, options);
}

HazardResult AccessContext::DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                                     SyncStageAccessFlags src_stage_accesses,
                                                     const VkImageMemoryBarrier &barrier) const {
    auto subresource_range = NormalizeSubresourceRange(image.createInfo, barrier.subresourceRange);
    const auto src_access_scope = SyncStageAccess::AccessScope(src_stage_accesses, barrier.srcAccessMask);
    return DetectImageBarrierHazard(image, src_exec_scope, src_access_scope, subresource_range, kDetectAll);
}

template <typename Flags, typename Map>
SyncStageAccessFlags AccessScopeImpl(Flags flag_mask, const Map &map) {
    SyncStageAccessFlags scope = 0;
    for (const auto &bit_scope : map) {
        if (flag_mask < bit_scope.first) break;

        if (flag_mask & bit_scope.first) {
            scope |= bit_scope.second;
        }
    }
    return scope;
}

SyncStageAccessFlags SyncStageAccess::AccessScopeByStage(VkPipelineStageFlags stages) {
    return AccessScopeImpl(stages, syncStageAccessMaskByStageBit);
}

SyncStageAccessFlags SyncStageAccess::AccessScopeByAccess(VkAccessFlags accesses) {
    return AccessScopeImpl(accesses, syncStageAccessMaskByAccessBit);
}

// Getting from stage mask and access mask to stage/access masks is something we need to be good at...
SyncStageAccessFlags SyncStageAccess::AccessScope(VkPipelineStageFlags stages, VkAccessFlags accesses) {
    // The access scope is the intersection of all stage/access types possible for the enabled stages and the enabled
    // accesses (after factoring out common terms, the union of stage/access intersections is the intersection of the
    // union of all stage/access types for all the stages and the same union for the access mask...)
    return AccessScopeByStage(stages) & AccessScopeByAccess(accesses);
}

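// Generic walker that applies 'action' to every existing entry overlapping 'range', splitting entries at the range
// boundaries and asking the action to Infill() any gaps so the whole range is covered.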
template <typename Action>
void UpdateMemoryAccessState(ResourceAccessRangeMap *accesses, const ResourceAccessRange &range, const Action &action) {
    // TODO: Optimization for operations that do a pure overwrite (i.e. WRITE usages which rewrite the state, vs READ usages
    // that do incremental updates
    auto pos = accesses->lower_bound(range);
    if (pos == accesses->end() || !pos->first.intersects(range)) {
        // The range is empty, fill it with a default value.
        pos = action.Infill(accesses, pos, range);
    } else if (range.begin < pos->first.begin) {
        // Leading empty space, infill
        pos = action.Infill(accesses, pos, ResourceAccessRange(range.begin, pos->first.begin));
    } else if (pos->first.begin < range.begin) {
        // Trim the beginning if needed
        pos = accesses->split(pos, range.begin, sparse_container::split_op_keep_both());
        ++pos;
    }

    const auto the_end = accesses->end();
    while ((pos != the_end) && pos->first.intersects(range)) {
        if (pos->first.end > range.end) {
            pos = accesses->split(pos, range.end, sparse_container::split_op_keep_both());
        }

        pos = action(accesses, pos);
        if (pos == the_end) break;

        auto next = pos;
        ++next;
        if ((pos->first.end < range.end) && (next != the_end) && !next->first.is_subsequent_to(pos->first)) {
            // Need to infill if next is disjoint
            VkDeviceSize limit = (next == the_end) ? range.end : std::min(range.end, next->first.begin);
            ResourceAccessRange new_range(pos->first.end, limit);
            next = action.Infill(accesses, next, new_range);
        }
        pos = next;
    }
}

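// Functor for UpdateMemoryAccessState that records 'usage' at 'tag' over a range; gaps are infilled by resolving
// the previous contexts' state so the update is applied on top of the correct prior access.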
1021struct UpdateMemoryAccessStateFunctor {
John Zulauf5c5e88d2019-12-26 11:22:02 -07001022 using Iterator = ResourceAccessRangeMap::iterator;
1023 Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const {
John Zulauf5f13a792020-03-10 07:31:21 -06001024 // this is only called on gaps, and never returns a gap.
1025 ResourceAccessState default_state;
John Zulauf16adfc92020-04-08 10:28:33 -06001026 context.ResolvePreviousAccess(type, range, accesses, &default_state);
John Zulauf5f13a792020-03-10 07:31:21 -06001027 return accesses->lower_bound(range);
John Zulauf9cb530d2019-09-30 14:14:10 -06001028 }
John Zulauf5f13a792020-03-10 07:31:21 -06001029
John Zulauf5c5e88d2019-12-26 11:22:02 -07001030 Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
John Zulauf9cb530d2019-09-30 14:14:10 -06001031 auto &access_state = pos->second;
1032 access_state.Update(usage, tag);
1033 return pos;
1034 }
1035
John Zulauf16adfc92020-04-08 10:28:33 -06001036 UpdateMemoryAccessStateFunctor(AccessContext::AddressType type_, const AccessContext &context_, SyncStageAccessIndex usage_,
John Zulauf540266b2020-04-06 18:54:53 -06001037 const ResourceUsageTag &tag_)
John Zulauf16adfc92020-04-08 10:28:33 -06001038 : type(type_), context(context_), usage(usage_), tag(tag_) {}
1039 const AccessContext::AddressType type;
John Zulauf540266b2020-04-06 18:54:53 -06001040 const AccessContext &context;
John Zulauf16adfc92020-04-08 10:28:33 -06001041 const SyncStageAccessIndex usage;
John Zulauf9cb530d2019-09-30 14:14:10 -06001042 const ResourceUsageTag &tag;
1043};
1044
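// Functor that applies a single memory barrier (src/dst execution and access scopes) to each existing access state
// entry; gaps are left untouched (Infill is a no-op).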
1045struct ApplyMemoryAccessBarrierFunctor {
John Zulauf5c5e88d2019-12-26 11:22:02 -07001046 using Iterator = ResourceAccessRangeMap::iterator;
1047 inline Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const { return pos; }
John Zulauf9cb530d2019-09-30 14:14:10 -06001048
John Zulauf5c5e88d2019-12-26 11:22:02 -07001049 Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
John Zulauf9cb530d2019-09-30 14:14:10 -06001050 auto &access_state = pos->second;
John Zulauf36bcf6a2020-02-03 15:12:52 -07001051 access_state.ApplyMemoryAccessBarrier(src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope);
John Zulauf9cb530d2019-09-30 14:14:10 -06001052 return pos;
1053 }
1054
John Zulauf36bcf6a2020-02-03 15:12:52 -07001055 ApplyMemoryAccessBarrierFunctor(VkPipelineStageFlags src_exec_scope_, SyncStageAccessFlags src_access_scope_,
1056 VkPipelineStageFlags dst_exec_scope_, SyncStageAccessFlags dst_access_scope_)
1057 : src_exec_scope(src_exec_scope_),
1058 src_access_scope(src_access_scope_),
1059 dst_exec_scope(dst_exec_scope_),
1060 dst_access_scope(dst_access_scope_) {}
John Zulauf9cb530d2019-09-30 14:14:10 -06001061
John Zulauf36bcf6a2020-02-03 15:12:52 -07001062 VkPipelineStageFlags src_exec_scope;
1063 SyncStageAccessFlags src_access_scope;
1064 VkPipelineStageFlags dst_exec_scope;
1065 SyncStageAccessFlags dst_access_scope;
John Zulauf9cb530d2019-09-30 14:14:10 -06001066};
1067
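// Functor for global memory barriers: applies the execution barrier to each entry, then each of the
// per-VkMemoryBarrier access barriers built in the constructor.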
1068struct ApplyGlobalBarrierFunctor {
John Zulauf5c5e88d2019-12-26 11:22:02 -07001069 using Iterator = ResourceAccessRangeMap::iterator;
1070 inline Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const { return pos; }
John Zulauf9cb530d2019-09-30 14:14:10 -06001071
John Zulauf5c5e88d2019-12-26 11:22:02 -07001072 Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
John Zulauf9cb530d2019-09-30 14:14:10 -06001073 auto &access_state = pos->second;
John Zulauf36bcf6a2020-02-03 15:12:52 -07001074 access_state.ApplyExecutionBarrier(src_exec_scope, dst_exec_scope);
John Zulauf9cb530d2019-09-30 14:14:10 -06001075
1076 for (const auto &functor : barrier_functor) {
1077 functor(accesses, pos);
1078 }
1079 return pos;
1080 }
1081
John Zulauf36bcf6a2020-02-03 15:12:52 -07001082 ApplyGlobalBarrierFunctor(VkPipelineStageFlags src_exec_scope, VkPipelineStageFlags dst_exec_scope,
1083 SyncStageAccessFlags src_stage_accesses, SyncStageAccessFlags dst_stage_accesses,
John Zulauf9cb530d2019-09-30 14:14:10 -06001084 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers)
John Zulauf36bcf6a2020-02-03 15:12:52 -07001085 : src_exec_scope(src_exec_scope), dst_exec_scope(dst_exec_scope) {
John Zulauf9cb530d2019-09-30 14:14:10 -06001086 // Don't want to create this per tracked item, but don't want to loop through all tracked items per barrier...
1087 barrier_functor.reserve(memoryBarrierCount);
1088 for (uint32_t barrier_index = 0; barrier_index < memoryBarrierCount; barrier_index++) {
1089 const auto &barrier = pMemoryBarriers[barrier_index];
John Zulauf36bcf6a2020-02-03 15:12:52 -07001090 barrier_functor.emplace_back(src_exec_scope, SyncStageAccess::AccessScope(src_stage_accesses, barrier.srcAccessMask),
1091 dst_exec_scope, SyncStageAccess::AccessScope(dst_stage_accesses, barrier.dstAccessMask));
John Zulauf9cb530d2019-09-30 14:14:10 -06001092 }
1093 }
1094
John Zulauf36bcf6a2020-02-03 15:12:52 -07001095 const VkPipelineStageFlags src_exec_scope;
1096 const VkPipelineStageFlags dst_exec_scope;
John Zulauf9cb530d2019-09-30 14:14:10 -06001097 std::vector<ApplyMemoryAccessBarrierFunctor> barrier_functor;
1098};
1099
John Zulauf355e49b2020-04-24 15:11:15 -06001100void AccessContext::UpdateAccessState(AddressType type, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
1101 const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001102 UpdateMemoryAccessStateFunctor action(type, *this, current_usage, tag);
1103 UpdateMemoryAccessState(&GetAccessStateMap(type), range, action);
John Zulauf3d84f1b2020-03-09 13:33:25 -06001104}
1105
John Zulauf16adfc92020-04-08 10:28:33 -06001106void AccessContext::UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage,
John Zulauf355e49b2020-04-24 15:11:15 -06001107 const ResourceAccessRange &range, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001108 if (!SimpleBinding(buffer)) return;
1109 const auto base_address = ResourceBaseAddress(buffer);
1110 UpdateAccessState(AddressType::kLinearAddress, current_usage, range + base_address, tag);
1111}
John Zulauf355e49b2020-04-24 15:11:15 -06001112
John Zulauf540266b2020-04-06 18:54:53 -06001113void AccessContext::UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
John Zulauf355e49b2020-04-24 15:11:15 -06001114 const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
John Zulauf540266b2020-04-06 18:54:53 -06001115 const VkExtent3D &extent, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001116 if (!SimpleBinding(image)) return;
locke-lunargae26eac2020-04-16 15:29:05 -06001117 subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, offset, extent);
John Zulauf16adfc92020-04-08 10:28:33 -06001118 const auto address_type = ImageAddressType(image);
1119 const auto base_address = ResourceBaseAddress(image);
1120 UpdateMemoryAccessStateFunctor action(address_type, *this, current_usage, tag);
John Zulauf5f13a792020-03-10 07:31:21 -06001121 for (; range_gen->non_empty(); ++range_gen) {
John Zulauf16adfc92020-04-08 10:28:33 -06001122 UpdateMemoryAccessState(&GetAccessStateMap(address_type), (*range_gen + base_address), action);
John Zulauf5f13a792020-03-10 07:31:21 -06001123 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06001124}
John Zulauf7635de32020-05-29 17:14:15 -06001125void AccessContext::UpdateAccessState(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const VkOffset3D &offset,
1126 const VkExtent3D &extent, VkImageAspectFlags aspect_mask, const ResourceUsageTag &tag) {
1127 if (view != nullptr) {
1128 const IMAGE_STATE *image = view->image_state.get();
1129 if (image != nullptr) {
1130 auto *update_range = &view->normalized_subresource_range;
1131 VkImageSubresourceRange masked_range;
1132 if (aspect_mask) { // If present and non-zero, restrict the normalized range to aspects present in aspect_mask
1133 masked_range = view->normalized_subresource_range;
1134 masked_range.aspectMask = aspect_mask & masked_range.aspectMask;
1135 update_range = &masked_range;
1136 }
1137 UpdateAccessState(*image, current_usage, *update_range, offset, extent, tag);
1138 }
1139 }
1140}
John Zulauf3d84f1b2020-03-09 13:33:25 -06001141
John Zulauf355e49b2020-04-24 15:11:15 -06001142void AccessContext::UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
1143 const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
1144 const VkExtent3D &extent, const ResourceUsageTag &tag) {
John Zulauf355e49b2020-04-24 15:11:15 -06001145 VkImageSubresourceRange subresource_range = {subresource.aspectMask, subresource.mipLevel, 1, subresource.baseArrayLayer,
1146 subresource.layerCount};
1147 UpdateAccessState(image, current_usage, subresource_range, offset, extent, tag);
1148}
1149
John Zulauf540266b2020-04-06 18:54:53 -06001150template <typename Action>
1151void AccessContext::UpdateMemoryAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action) {
John Zulauf16adfc92020-04-08 10:28:33 -06001152 if (!SimpleBinding(buffer)) return;
1153 const auto base_address = ResourceBaseAddress(buffer);
1154 UpdateMemoryAccessState(&GetAccessStateMap(AddressType::kLinearAddress), (range + base_address), action);
John Zulauf540266b2020-04-06 18:54:53 -06001155}
1156
1157template <typename Action>
1158void AccessContext::UpdateMemoryAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
1159 const Action action) {
John Zulauf16adfc92020-04-08 10:28:33 -06001160 if (!SimpleBinding(image)) return;
1161 const auto address_type = ImageAddressType(image);
1162 auto *accesses = &GetAccessStateMap(address_type);
John Zulauf540266b2020-04-06 18:54:53 -06001163
locke-lunargae26eac2020-04-16 15:29:05 -06001164 subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, {0, 0, 0},
locke-lunarg5f7d3c62020-04-07 00:10:39 -06001165 image.createInfo.extent);
John Zulauf540266b2020-04-06 18:54:53 -06001166
John Zulauf16adfc92020-04-08 10:28:33 -06001167 const auto base_address = ResourceBaseAddress(image);
John Zulauf540266b2020-04-06 18:54:53 -06001168 for (; range_gen->non_empty(); ++range_gen) {
John Zulauf16adfc92020-04-08 10:28:33 -06001169 UpdateMemoryAccessState(accesses, (*range_gen + base_address), action);
John Zulauf540266b2020-04-06 18:54:53 -06001170 }
1171}
1172
John Zulauf7635de32020-05-29 17:14:15 -06001173void AccessContext::UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
1174 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
1175 const ResourceUsageTag &tag) {
1176 UpdateStateResolveAction update(*this, tag);
1177 ResolveOperation(update, rp_state, render_area, attachment_views, subpass);
1178}
1179
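// Record the attachment store operations: for each attachment whose last use is this subpass, any store op other
// than VK_ATTACHMENT_STORE_OP_NONE_QCOM is recorded as a color write or a late-fragment-tests depth/stencil write.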
John Zulaufaff20662020-06-01 14:07:58 -06001180void AccessContext::UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
1181 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
1182 const ResourceUsageTag &tag) {
1183 const auto *attachment_ci = rp_state.createInfo.pAttachments;
1184 VkExtent3D extent = CastTo3D(render_area.extent);
1185 VkOffset3D offset = CastTo3D(render_area.offset);
1186
1187 for (uint32_t i = 0; i < rp_state.createInfo.attachmentCount; i++) {
1188 if (rp_state.attachment_last_subpass[i] == subpass) {
1189 if (attachment_views[i] == nullptr) continue; // UNUSED
1190 const auto &view = *attachment_views[i];
1191 const IMAGE_STATE *image = view.image_state.get();
1192 if (image == nullptr) continue;
1193
1194 const auto &ci = attachment_ci[i];
1195 const bool has_depth = FormatHasDepth(ci.format);
1196 const bool has_stencil = FormatHasStencil(ci.format);
1197 const bool is_color = !(has_depth || has_stencil);
1198 const bool store_op_stores = ci.storeOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
1199
1200 if (is_color && store_op_stores) {
1201 UpdateAccessState(*image, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, view.normalized_subresource_range,
1202 offset, extent, tag);
1203 } else {
1204 auto update_range = view.normalized_subresource_range;
1205 if (has_depth && store_op_stores) {
1206 update_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
1207 UpdateAccessState(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, update_range, offset, extent,
1208 tag);
1209 }
1210 const bool stencil_op_stores = ci.stencilStoreOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
1211 if (has_stencil && stencil_op_stores) {
1212 update_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
1213 UpdateAccessState(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, update_range, offset, extent,
1214 tag);
1215 }
1216 }
1217 }
1218 }
1219}
1220
John Zulauf540266b2020-04-06 18:54:53 -06001221template <typename Action>
1222void AccessContext::ApplyGlobalBarriers(const Action &barrier_action) {
1223 // Note: Barriers do *not* cross context boundaries, applying only to accesses within... (at least for renderpass subpasses)
John Zulauf16adfc92020-04-08 10:28:33 -06001224 for (const auto address_type : kAddressTypes) {
1225 UpdateMemoryAccessState(&GetAccessStateMap(address_type), full_range, barrier_action);
John Zulauf540266b2020-04-06 18:54:53 -06001226 }
1227}
1228
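// Import the accesses recorded in each subpass context into this (parent) context, applying each subpass's
// dst-external barrier as the accesses are resolved.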
1229void AccessContext::ResolveChildContexts(const std::vector<AccessContext> &contexts) {
John Zulauf540266b2020-04-06 18:54:53 -06001230 for (uint32_t subpass_index = 0; subpass_index < contexts.size(); subpass_index++) {
1231 auto &context = contexts[subpass_index];
John Zulauf16adfc92020-04-08 10:28:33 -06001232 for (const auto address_type : kAddressTypes) {
John Zulauf355e49b2020-04-24 15:11:15 -06001233 context.ResolveAccessRange(address_type, full_range, &context.GetDstExternalTrackBack().barrier,
1234 &GetAccessStateMap(address_type), nullptr, false);
John Zulauf540266b2020-04-06 18:54:53 -06001235 }
1236 }
1237}
1238
John Zulauf355e49b2020-04-24 15:11:15 -06001239void AccessContext::ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
1240 SyncStageAccessFlags src_access_scope, VkPipelineStageFlags dst_exec_scope,
1241 SyncStageAccessFlags dst_access_scope, const VkImageSubresourceRange &subresource_range) {
1242 const ApplyMemoryAccessBarrierFunctor barrier_action(src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope);
1243 UpdateMemoryAccess(image, subresource_range, barrier_action);
1244}
1245
John Zulauf7635de32020-05-29 17:14:15 -06001246// Note: ImageBarriers do not operate at offset/extent resolution, only at the whole subresource level
John Zulauf355e49b2020-04-24 15:11:15 -06001247void AccessContext::ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
1248 SyncStageAccessFlags src_access_scope, VkPipelineStageFlags dst_exec_scope,
1249 SyncStageAccessFlags dst_access_scope, const VkImageSubresourceRange &subresource_range,
1250 bool layout_transition, const ResourceUsageTag &tag) {
1251 if (layout_transition) {
1252 UpdateAccessState(image, SYNC_IMAGE_LAYOUT_TRANSITION, subresource_range, VkOffset3D{0, 0, 0}, image.createInfo.extent,
1253 tag);
1254 ApplyImageBarrier(image, src_exec_scope, SYNC_IMAGE_LAYOUT_TRANSITION_BIT, dst_exec_scope, dst_access_scope,
1255 subresource_range);
John Zulaufc9201222020-05-13 15:13:03 -06001256 } else {
1257 ApplyImageBarrier(image, src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope, subresource_range);
John Zulauf355e49b2020-04-24 15:11:15 -06001258 }
John Zulauf355e49b2020-04-24 15:11:15 -06001259}
1260
John Zulauf7635de32020-05-29 17:14:15 -06001261// Note: ImageBarriers do not operate at offset/extent resolution, only at the whole subresource level
John Zulauf355e49b2020-04-24 15:11:15 -06001262void AccessContext::ApplyImageBarrier(const IMAGE_STATE &image, const SyncBarrier &barrier,
1263 const VkImageSubresourceRange &subresource_range, bool layout_transition,
1264 const ResourceUsageTag &tag) {
1265 ApplyImageBarrier(image, barrier.src_exec_scope, barrier.src_access_scope, barrier.dst_exec_scope, barrier.dst_access_scope,
1266 subresource_range, layout_transition, tag);
1267}
1268
1269// Suitable only for *subpass* access contexts
John Zulauf7635de32020-05-29 17:14:15 -06001270HazardResult AccessContext::DetectSubpassTransitionHazard(const TrackBack &track_back, const IMAGE_VIEW_STATE *attach_view) const {
John Zulauf355e49b2020-04-24 15:11:15 -06001271 if (!attach_view) return HazardResult();
1272 const auto image_state = attach_view->image_state.get();
1273 if (!image_state) return HazardResult();
1274
John Zulauf355e49b2020-04-24 15:11:15 -06001275 // We should never ask for a transition from a context we don't have
John Zulauf7635de32020-05-29 17:14:15 -06001276 assert(track_back.context);
John Zulauf355e49b2020-04-24 15:11:15 -06001277
1278 // Do the detection against the specific prior context independent of other contexts. (Synchronous only)
John Zulauf7635de32020-05-29 17:14:15 -06001279 auto hazard = track_back.context->DetectImageBarrierHazard(*image_state, track_back.barrier.src_exec_scope,
1280 track_back.barrier.src_access_scope,
1281 attach_view->normalized_subresource_range, kDetectPrevious);
John Zulauf355e49b2020-04-24 15:11:15 -06001282 if (!hazard.hazard) {
1283 // The Async hazard check is against the current context's async set.
John Zulauf7635de32020-05-29 17:14:15 -06001284 hazard = DetectImageBarrierHazard(*image_state, track_back.barrier.src_exec_scope, track_back.barrier.src_access_scope,
John Zulauf355e49b2020-04-24 15:11:15 -06001285 attach_view->normalized_subresource_range, kDetectAsync);
1286 }
1287 return hazard;
1288}
1289
1290// Class CommandBufferAccessContext: Keep track of resource access state information for a specific command buffer
1291bool CommandBufferAccessContext::ValidateBeginRenderPass(const RENDER_PASS_STATE &rp_state,
1292
1293 const VkRenderPassBeginInfo *pRenderPassBegin,
1294 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
1295 const char *func_name) const {
1296 // Check if any of the layout transitions are hazardous... but we don't have the renderpass context to work with, so we build a temporary one below
1297 bool skip = false;
1298 uint32_t subpass = 0;
1299 const auto &transitions = rp_state.subpass_transitions[subpass];
1300 if (transitions.size()) {
1301 const std::vector<AccessContext> empty_context_vector;
1302 // Create context we can use to validate against...
1303 AccessContext temp_context(subpass, queue_flags_, rp_state.subpass_dependencies, empty_context_vector,
1304 const_cast<AccessContext *>(&cb_access_context_));
1305
1306 assert(pRenderPassBegin);
1307 if (nullptr == pRenderPassBegin) return skip;
1308
1309 const auto fb_state = sync_state_->Get<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer);
1310 assert(fb_state);
1311 if (nullptr == fb_state) return skip;
1312
1313 // Create a limited array of views (which we'll need to toss after validation)
1314 std::vector<const IMAGE_VIEW_STATE *> views;
1315 const auto count_attachment = GetFramebufferAttachments(*pRenderPassBegin, *fb_state);
1316 const auto attachment_count = count_attachment.first;
1317 const auto *attachments = count_attachment.second;
1318 views.resize(attachment_count, nullptr);
1319 for (const auto &transition : transitions) {
1320 assert(transition.attachment < attachment_count);
1321 views[transition.attachment] = sync_state_->Get<IMAGE_VIEW_STATE>(attachments[transition.attachment]);
1322 }
1323
John Zulauf7635de32020-05-29 17:14:15 -06001324 skip |= temp_context.ValidateLayoutTransitions(*sync_state_, rp_state, pRenderPassBegin->renderArea, 0, views, func_name);
1325 skip |= temp_context.ValidateLoadOperation(*sync_state_, rp_state, pRenderPassBegin->renderArea, 0, views, func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06001326 }
1327 return skip;
1328}
1329
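// Validate descriptor-backed accesses for a dispatch/draw: for each bound shader stage's descriptor uses,
// hazard-check the referenced image subresources, texel buffer views, and buffer ranges against the current context.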
locke-lunarg61870c22020-06-09 14:51:50 -06001330bool CommandBufferAccessContext::ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint,
1331 const char *func_name) const {
1332 bool skip = false;
1333 const PIPELINE_STATE *pPipe = nullptr;
1334 const std::vector<LAST_BOUND_STATE::PER_SET> *per_sets = nullptr;
1335 GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(*cb_state_.get(), pipelineBindPoint, &pPipe, &per_sets);
1336 if (!pPipe || !per_sets) {
1337 return skip;
1338 }
1339
1340 using DescriptorClass = cvdescriptorset::DescriptorClass;
1341 using BufferDescriptor = cvdescriptorset::BufferDescriptor;
1342 using ImageDescriptor = cvdescriptorset::ImageDescriptor;
1343 using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
1344 using TexelDescriptor = cvdescriptorset::TexelDescriptor;
1345
1346 for (const auto &stage_state : pPipe->stage_state) {
locke-lunarg37047832020-06-12 13:44:45 -06001347 if (stage_state.stage_flag == VK_SHADER_STAGE_FRAGMENT_BIT && pPipe->graphicsPipelineCI.pRasterizationState &&
locke-lunarge9f1cdf2020-06-12 12:28:57 -06001348 pPipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable)
1349 continue;
locke-lunarg61870c22020-06-09 14:51:50 -06001350 for (const auto &set_binding : stage_state.descriptor_uses) {
1351 cvdescriptorset::DescriptorSet *descriptor_set = (*per_sets)[set_binding.first.first].bound_descriptor_set;
1352 cvdescriptorset::DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(),
1353 set_binding.first.second);
1354 const auto descriptor_type = binding_it.GetType();
1355 cvdescriptorset::IndexRange index_range = binding_it.GetGlobalIndexRange();
1356 auto array_idx = 0;
1357
1358 if (binding_it.IsVariableDescriptorCount()) {
1359 index_range.end = index_range.start + descriptor_set->GetVariableDescriptorCount();
1360 }
1361 SyncStageAccessIndex sync_index =
1362 GetSyncStageAccessIndexsByDescriptorSet(descriptor_type, set_binding.second, stage_state.stage_flag);
1363
1364 for (uint32_t i = index_range.start; i < index_range.end; ++i, ++array_idx) {
1365 uint32_t index = i - index_range.start;
1366 const auto *descriptor = descriptor_set->GetDescriptorFromGlobalIndex(i);
1367 switch (descriptor->GetClass()) {
1368 case DescriptorClass::ImageSampler:
1369 case DescriptorClass::Image: {
1370 const IMAGE_VIEW_STATE *img_view_state = nullptr;
1371 if (descriptor->GetClass() == DescriptorClass::ImageSampler) {
1372 img_view_state = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageViewState();
1373 } else {
1374 img_view_state = static_cast<const ImageDescriptor *>(descriptor)->GetImageViewState();
1375 }
1376 if (!img_view_state) continue;
1377 const IMAGE_STATE *img_state = img_view_state->image_state.get();
1378 VkExtent3D extent = {};
1379 VkOffset3D offset = {};
1380 if (sync_index == SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ) {
1381 extent = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.extent);
1382 offset = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.offset);
1383 } else {
1384 extent = img_state->createInfo.extent;
1385 }
1386 auto hazard = current_context_->DetectHazard(*img_state, sync_index,
1387 img_view_state->normalized_subresource_range, offset, extent);
1388 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06001389 skip |= sync_state_->LogError(
1390 img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
1391 "%s: Hazard %s for %s in %s, %s, and %s binding #%" PRIu32 " index %" PRIu32 ". Prior access %s.",
1392 func_name, string_SyncHazard(hazard.hazard),
1393 sync_state_->report_data->FormatHandle(img_view_state->image_view).c_str(),
1394 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(),
1395 sync_state_->report_data->FormatHandle(pPipe->pipeline).c_str(),
1396 sync_state_->report_data->FormatHandle(descriptor_set->GetSet()).c_str(), set_binding.first.second,
1397 index, string_UsageTag(hazard.tag).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001398 }
1399 break;
1400 }
1401 case DescriptorClass::TexelBuffer: {
1402 auto buf_view_state = static_cast<const TexelDescriptor *>(descriptor)->GetBufferViewState();
1403 if (!buf_view_state) continue;
1404 const BUFFER_STATE *buf_state = buf_view_state->buffer_state.get();
1405 ResourceAccessRange range =
1406 MakeRange(buf_view_state->create_info.offset,
1407 GetRealWholeSize(buf_view_state->create_info.offset, buf_view_state->create_info.range,
1408 buf_state->createInfo.size));
1409 auto hazard = current_context_->DetectHazard(*buf_state, sync_index, range);
1410 if (hazard.hazard) {
1411 skip |=
1412 sync_state_->LogError(buf_view_state->buffer_view, string_SyncHazardVUID(hazard.hazard),
1413 "%s: Hazard %s for %s in %s, %s, and %s binding #%d index %d", func_name,
1414 string_SyncHazard(hazard.hazard),
1415 sync_state_->report_data->FormatHandle(buf_view_state->buffer_view).c_str(),
1416 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(),
1417 sync_state_->report_data->FormatHandle(pPipe->pipeline).c_str(),
1418 sync_state_->report_data->FormatHandle(descriptor_set->GetSet()).c_str(),
1419 set_binding.first.second, index);
1420 }
1421 break;
1422 }
1423 case DescriptorClass::GeneralBuffer: {
1424 const auto *buffer_descriptor = static_cast<const BufferDescriptor *>(descriptor);
1425 auto buf_state = buffer_descriptor->GetBufferState();
1426 if (!buf_state) continue;
1427 ResourceAccessRange range = MakeRange(buffer_descriptor->GetOffset(), buffer_descriptor->GetRange());
1428 auto hazard = current_context_->DetectHazard(*buf_state, sync_index, range);
1429 if (hazard.hazard) {
1430 skip |= sync_state_->LogError(buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
1431 "%s: Hazard %s for %s in %s, %s, and %s binding #%d index %d", func_name,
1432 string_SyncHazard(hazard.hazard),
1433 sync_state_->report_data->FormatHandle(buf_state->buffer).c_str(),
1434 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(),
1435 sync_state_->report_data->FormatHandle(pPipe->pipeline).c_str(),
1436 sync_state_->report_data->FormatHandle(descriptor_set->GetSet()).c_str(),
1437 set_binding.first.second, index);
1438 }
1439 break;
1440 }
1441 // TODO: INLINE_UNIFORM_BLOCK_EXT, ACCELERATION_STRUCTURE_KHR
1442 default:
1443 break;
1444 }
1445 }
1446 }
1447 }
1448 return skip;
1449}
1450
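// Record the descriptor-backed accesses for a dispatch/draw (mirrors ValidateDispatchDrawDescriptorSet above) so
// that subsequent commands can detect hazards against them.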
1451void CommandBufferAccessContext::RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint,
1452 const ResourceUsageTag &tag) {
1453 const PIPELINE_STATE *pPipe = nullptr;
1454 const std::vector<LAST_BOUND_STATE::PER_SET> *per_sets = nullptr;
1455 GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(*cb_state_.get(), pipelineBindPoint, &pPipe, &per_sets);
1456 if (!pPipe || !per_sets) {
1457 return;
1458 }
1459
1460 using DescriptorClass = cvdescriptorset::DescriptorClass;
1461 using BufferDescriptor = cvdescriptorset::BufferDescriptor;
1462 using ImageDescriptor = cvdescriptorset::ImageDescriptor;
1463 using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
1464 using TexelDescriptor = cvdescriptorset::TexelDescriptor;
1465
1466 for (const auto &stage_state : pPipe->stage_state) {
locke-lunarge9f1cdf2020-06-12 12:28:57 -06001467 if (stage_state.stage_flag == VK_SHADER_STAGE_FRAGMENT_BIT && pPipe->graphicsPipelineCI.pRasterizationState &&
1468 pPipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable)
1469 continue;
locke-lunarg61870c22020-06-09 14:51:50 -06001470 for (const auto &set_binding : stage_state.descriptor_uses) {
1471 cvdescriptorset::DescriptorSet *descriptor_set = (*per_sets)[set_binding.first.first].bound_descriptor_set;
1472 cvdescriptorset::DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(),
1473 set_binding.first.second);
1474 const auto descriptor_type = binding_it.GetType();
1475 cvdescriptorset::IndexRange index_range = binding_it.GetGlobalIndexRange();
1476 auto array_idx = 0;
1477
1478 if (binding_it.IsVariableDescriptorCount()) {
1479 index_range.end = index_range.start + descriptor_set->GetVariableDescriptorCount();
1480 }
1481 SyncStageAccessIndex sync_index =
1482 GetSyncStageAccessIndexsByDescriptorSet(descriptor_type, set_binding.second, stage_state.stage_flag);
1483
1484 for (uint32_t i = index_range.start; i < index_range.end; ++i, ++array_idx) {
1485 const auto *descriptor = descriptor_set->GetDescriptorFromGlobalIndex(i);
1486 switch (descriptor->GetClass()) {
1487 case DescriptorClass::ImageSampler:
1488 case DescriptorClass::Image: {
1489 const IMAGE_VIEW_STATE *img_view_state = nullptr;
1490 if (descriptor->GetClass() == DescriptorClass::ImageSampler) {
1491 img_view_state = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageViewState();
1492 } else {
1493 img_view_state = static_cast<const ImageDescriptor *>(descriptor)->GetImageViewState();
1494 }
1495 if (!img_view_state) continue;
1496 const IMAGE_STATE *img_state = img_view_state->image_state.get();
1497 VkExtent3D extent = {};
1498 VkOffset3D offset = {};
1499 if (sync_index == SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ) {
1500 extent = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.extent);
1501 offset = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.offset);
1502 } else {
1503 extent = img_state->createInfo.extent;
1504 }
1505 current_context_->UpdateAccessState(*img_state, sync_index, img_view_state->normalized_subresource_range,
1506 offset, extent, tag);
1507 break;
1508 }
1509 case DescriptorClass::TexelBuffer: {
1510 auto buf_view_state = static_cast<const TexelDescriptor *>(descriptor)->GetBufferViewState();
1511 if (!buf_view_state) continue;
1512 const BUFFER_STATE *buf_state = buf_view_state->buffer_state.get();
1513 ResourceAccessRange range =
1514 MakeRange(buf_view_state->create_info.offset, buf_view_state->create_info.range);
1515 current_context_->UpdateAccessState(*buf_state, sync_index, range, tag);
1516 break;
1517 }
1518 case DescriptorClass::GeneralBuffer: {
1519 const auto *buffer_descriptor = static_cast<const BufferDescriptor *>(descriptor);
1520 auto buf_state = buffer_descriptor->GetBufferState();
1521 if (!buf_state) continue;
1522 ResourceAccessRange range = MakeRange(buffer_descriptor->GetOffset(), buffer_descriptor->GetRange());
1523 current_context_->UpdateAccessState(*buf_state, sync_index, range, tag);
1524 break;
1525 }
1526 // TODO: INLINE_UNIFORM_BLOCK_EXT, ACCELERATION_STRUCTURE_KHR
1527 default:
1528 break;
1529 }
1530 }
1531 }
1532 }
1533}
1534
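// Hazard-check the vertex buffer ranges a draw will read, computed from the bound vertex buffers and the
// pipeline's vertex binding descriptions (binding offset, stride, firstVertex, vertexCount).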
1535bool CommandBufferAccessContext::ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const char *func_name) const {
1536 bool skip = false;
1537 const auto *pPipe = GetCurrentPipelineFromCommandBuffer(*cb_state_.get(), VK_PIPELINE_BIND_POINT_GRAPHICS);
1538 if (!pPipe) {
1539 return skip;
1540 }
1541
1542 const auto &binding_buffers = cb_state_->current_vertex_buffer_binding_info.vertex_buffer_bindings;
1543 const auto &binding_buffers_size = binding_buffers.size();
1544 const auto &binding_descriptions_size = pPipe->vertex_binding_descriptions_.size();
1545
1546 for (size_t i = 0; i < binding_descriptions_size; ++i) {
1547 const auto &binding_description = pPipe->vertex_binding_descriptions_[i];
1548 if (binding_description.binding < binding_buffers_size) {
1549 const auto &binding_buffer = binding_buffers[binding_description.binding];
1550 if (binding_buffer.buffer == VK_NULL_HANDLE) continue;
1551
1552 auto *buf_state = sync_state_->Get<BUFFER_STATE>(binding_buffer.buffer);
1553 VkDeviceSize range_start = 0;
1554 VkDeviceSize range_size = 0;
1555 GetBufferRange(range_start, range_size, binding_buffer.offset, buf_state->createInfo.size, firstVertex, vertexCount,
1556 binding_description.stride);
1557 ResourceAccessRange range = MakeRange(range_start, range_size);
1558 auto hazard = current_context_->DetectHazard(*buf_state, SYNC_VERTEX_INPUT_VERTEX_ATTRIBUTE_READ, range);
1559 if (hazard.hazard) {
1560 skip |= sync_state_->LogError(buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
1561 "%s: Hazard %s for vertex %s in %s", func_name, string_SyncHazard(hazard.hazard),
1562 sync_state_->report_data->FormatHandle(buf_state->buffer).c_str(),
1563 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str());
1564 }
1565 }
1566 }
1567 return skip;
1568}
1569
1570void CommandBufferAccessContext::RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const ResourceUsageTag &tag) {
1571 const auto *pPipe = GetCurrentPipelineFromCommandBuffer(*cb_state_.get(), VK_PIPELINE_BIND_POINT_GRAPHICS);
1572 if (!pPipe) {
1573 return;
1574 }
1575 const auto &binding_buffers = cb_state_->current_vertex_buffer_binding_info.vertex_buffer_bindings;
1576 const auto &binding_buffers_size = binding_buffers.size();
1577 const auto &binding_descriptions_size = pPipe->vertex_binding_descriptions_.size();
1578
1579 for (size_t i = 0; i < binding_descriptions_size; ++i) {
1580 const auto &binding_description = pPipe->vertex_binding_descriptions_[i];
1581 if (binding_description.binding < binding_buffers_size) {
1582 const auto &binding_buffer = binding_buffers[binding_description.binding];
1583 if (binding_buffer.buffer == VK_NULL_HANDLE) continue;
1584
1585 auto *buf_state = sync_state_->Get<BUFFER_STATE>(binding_buffer.buffer);
1586 VkDeviceSize range_start = 0;
1587 VkDeviceSize range_size = 0;
1588 GetBufferRange(range_start, range_size, binding_buffer.offset, buf_state->createInfo.size, firstVertex, vertexCount,
1589 binding_description.stride);
1590 ResourceAccessRange range = MakeRange(range_start, range_size);
1591 current_context_->UpdateAccessState(*buf_state, SYNC_VERTEX_INPUT_VERTEX_ATTRIBUTE_READ, range, tag);
1592 }
1593 }
1594}
1595
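// Hazard-check the index buffer range read by an indexed draw; vertex reads are checked conservatively against the
// whole bound vertex buffers (see the TODO below).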
1596bool CommandBufferAccessContext::ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const char *func_name) const {
1597 bool skip = false;
1598 if (cb_state_->index_buffer_binding.buffer == VK_NULL_HANDLE) return skip;
1599
1600 auto *index_buf_state = sync_state_->Get<BUFFER_STATE>(cb_state_->index_buffer_binding.buffer);
1601 const auto index_size = GetIndexAlignment(cb_state_->index_buffer_binding.index_type);
1602 VkDeviceSize range_start = 0;
1603 VkDeviceSize range_size = 0;
1604 GetBufferRange(range_start, range_size, cb_state_->index_buffer_binding.offset, index_buf_state->createInfo.size, firstIndex,
1605 indexCount, index_size);
1606 ResourceAccessRange range = MakeRange(range_start, range_size);
1607 auto hazard = current_context_->DetectHazard(*index_buf_state, SYNC_VERTEX_INPUT_INDEX_READ, range);
1608 if (hazard.hazard) {
1609 skip |= sync_state_->LogError(index_buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
1610 "%s: Hazard %s for index %s in %s", func_name, string_SyncHazard(hazard.hazard),
1611 sync_state_->report_data->FormatHandle(index_buf_state->buffer).c_str(),
1612 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str());
1613 }
1614
1615 // TODO: For now, we validate the whole vertex buffer. The index buffer contents can change up until queue submission,
1616 // so we can't know which vertices are actually read; a more accurate range will be detected in the future.
1617 skip |= ValidateDrawVertex(UINT32_MAX, 0, func_name);
1618 return skip;
1619}
1620
1621void CommandBufferAccessContext::RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const ResourceUsageTag &tag) {
1622 if (cb_state_->index_buffer_binding.buffer == VK_NULL_HANDLE) return;
1623
1624 auto *index_buf_state = sync_state_->Get<BUFFER_STATE>(cb_state_->index_buffer_binding.buffer);
1625 const auto index_size = GetIndexAlignment(cb_state_->index_buffer_binding.index_type);
1626 VkDeviceSize range_start = 0;
1627 VkDeviceSize range_size = 0;
1628 GetBufferRange(range_start, range_size, cb_state_->index_buffer_binding.offset, index_buf_state->createInfo.size, firstIndex,
1629 indexCount, index_size);
1630 ResourceAccessRange range = MakeRange(range_start, range_size);
1631 current_context_->UpdateAccessState(*index_buf_state, SYNC_VERTEX_INPUT_INDEX_READ, range, tag);
1632
1633 // TODO: For now, we record the whole vertex buffer. The index buffer contents can change up until queue submission,
1634 // so we can't know which vertices are actually read; a more accurate range will be detected in the future.
1635 RecordDrawVertex(UINT32_MAX, 0, tag);
1636}
1637
1638bool CommandBufferAccessContext::ValidateDrawSubpassAttachment(const char *func_name) const {
locke-lunarg7077d502020-06-18 21:37:26 -06001639 bool skip = false;
1640 if (!current_renderpass_context_) return skip;
1641 skip |= current_renderpass_context_->ValidateDrawSubpassAttachment(*sync_state_, *cb_state_.get(),
1642 cb_state_->activeRenderPassBeginInfo.renderArea, func_name);
1643 return skip;
locke-lunarg61870c22020-06-09 14:51:50 -06001644}
1645
1646void CommandBufferAccessContext::RecordDrawSubpassAttachment(const ResourceUsageTag &tag) {
locke-lunarg7077d502020-06-18 21:37:26 -06001647 if (current_renderpass_context_)
1648 current_renderpass_context_->RecordDrawSubpassAttachment(*cb_state_.get(), cb_state_->activeRenderPassBeginInfo.renderArea,
1649 tag);
locke-lunarg61870c22020-06-09 14:51:50 -06001650}
1651
John Zulauf355e49b2020-04-24 15:11:15 -06001652bool CommandBufferAccessContext::ValidateNextSubpass(const char *func_name) const {
John Zulauf355e49b2020-04-24 15:11:15 -06001653 bool skip = false;
locke-lunarg7077d502020-06-18 21:37:26 -06001654 if (!current_renderpass_context_) return skip;
John Zulauf1507ee42020-05-18 11:33:09 -06001655 skip |=
1656 current_renderpass_context_->ValidateNextSubpass(*sync_state_, cb_state_->activeRenderPassBeginInfo.renderArea, func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06001657
1658 return skip;
1659}
1660
1661bool CommandBufferAccessContext::ValidateEndRenderpass(const char *func_name) const {
1662 // TODO: Things to add here.
John Zulauf7635de32020-05-29 17:14:15 -06001663 // Validate Preserve attachments
John Zulauf355e49b2020-04-24 15:11:15 -06001664 bool skip = false;
locke-lunarg7077d502020-06-18 21:37:26 -06001665 if (!current_renderpass_context_) return skip;
John Zulauf7635de32020-05-29 17:14:15 -06001666 skip |= current_renderpass_context_->ValidateEndRenderPass(*sync_state_, cb_state_->activeRenderPassBeginInfo.renderArea,
1667 func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06001668
1669 return skip;
1670}
1671
1672void CommandBufferAccessContext::RecordBeginRenderPass(const ResourceUsageTag &tag) {
1673 assert(sync_state_);
1674 if (!cb_state_) return;
1675
1676 // Create an access context for the current renderpass.
1677 render_pass_contexts_.emplace_back(&cb_access_context_);
John Zulauf16adfc92020-04-08 10:28:33 -06001678 current_renderpass_context_ = &render_pass_contexts_.back();
John Zulauf355e49b2020-04-24 15:11:15 -06001679 current_renderpass_context_->RecordBeginRenderPass(*sync_state_, *cb_state_, queue_flags_, tag);
John Zulauf16adfc92020-04-08 10:28:33 -06001680 current_context_ = &current_renderpass_context_->CurrentContext();
John Zulauf16adfc92020-04-08 10:28:33 -06001681}
1682
John Zulauf355e49b2020-04-24 15:11:15 -06001683void CommandBufferAccessContext::RecordNextSubpass(const RENDER_PASS_STATE &rp_state, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001684 assert(current_renderpass_context_);
John Zulauf1507ee42020-05-18 11:33:09 -06001685 current_renderpass_context_->RecordNextSubpass(cb_state_->activeRenderPassBeginInfo.renderArea, tag);
John Zulauf16adfc92020-04-08 10:28:33 -06001686 current_context_ = &current_renderpass_context_->CurrentContext();
1687}
1688
John Zulauf355e49b2020-04-24 15:11:15 -06001689void CommandBufferAccessContext::RecordEndRenderPass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001690 assert(current_renderpass_context_);
1691 if (!current_renderpass_context_) return;
1692
John Zulauf7635de32020-05-29 17:14:15 -06001693 current_renderpass_context_->RecordEndRenderPass(cb_state_->activeRenderPassBeginInfo.renderArea, tag);
John Zulauf355e49b2020-04-24 15:11:15 -06001694 current_context_ = &cb_access_context_;
John Zulauf16adfc92020-04-08 10:28:33 -06001695 current_renderpass_context_ = nullptr;
1696}
1697
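// Validate draw-time attachment accesses: hazard-check writes to the fragment shader's writable color attachment
// outputs and, when depth/stencil writes are enabled and the layout allows them, the depth/stencil attachment,
// using raster-order-aware detection.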
locke-lunarg61870c22020-06-09 14:51:50 -06001698bool RenderPassAccessContext::ValidateDrawSubpassAttachment(const SyncValidator &sync_state, const CMD_BUFFER_STATE &cmd,
1699 const VkRect2D &render_area, const char *func_name) const {
1700 bool skip = false;
locke-lunarg96dc9632020-06-10 17:22:18 -06001701 const auto *pPipe = GetCurrentPipelineFromCommandBuffer(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunarge9f1cdf2020-06-12 12:28:57 -06001702 if (!pPipe ||
1703 (pPipe->graphicsPipelineCI.pRasterizationState && pPipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable)) {
locke-lunarg96dc9632020-06-10 17:22:18 -06001704 return skip;
1705 }
1706 const auto &list = pPipe->fragmentShader_writable_output_location_list;
locke-lunarg61870c22020-06-09 14:51:50 -06001707 const auto &subpass = rp_state_->createInfo.pSubpasses[current_subpass_];
1708 VkExtent3D extent = CastTo3D(render_area.extent);
1709 VkOffset3D offset = CastTo3D(render_area.offset);
locke-lunarg37047832020-06-12 13:44:45 -06001710
locke-lunarg44f9bb12020-06-10 14:43:57 -06001711 // Subpass's inputAttachment has been done in ValidateDispatchDrawDescriptorSet
locke-lunarg96dc9632020-06-10 17:22:18 -06001712 if (subpass.pColorAttachments && subpass.colorAttachmentCount && !list.empty()) {
1713 for (const auto location : list) {
1714 if (location >= subpass.colorAttachmentCount || subpass.pColorAttachments[location].attachment == VK_ATTACHMENT_UNUSED)
1715 continue;
1716 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pColorAttachments[location].attachment];
1717 HazardResult hazard = external_context_->DetectHazard(
1718 img_view_state, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, kColorAttachmentRasterOrder, offset, extent);
1719 if (hazard.hazard) {
1720 skip |= sync_state.LogError(
1721 img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
1722 "%s: Hazard %s for %s in %s, Subpass #%d, and pColorAttachments #%d", func_name,
1723 string_SyncHazard(hazard.hazard), sync_state.report_data->FormatHandle(img_view_state->image_view).c_str(),
1724 sync_state.report_data->FormatHandle(cmd.commandBuffer).c_str(), cmd.activeSubpass, location);
locke-lunarg61870c22020-06-09 14:51:50 -06001725 }
1726 }
1727 }
locke-lunarg37047832020-06-12 13:44:45 -06001728
1729 // PHASE1 TODO: Add layout based read/vs. write selection.
1730 // PHASE1 TODO: Read operations for both depth and stencil are possible in the future.
1731 if (pPipe->graphicsPipelineCI.pDepthStencilState && subpass.pDepthStencilAttachment &&
1732 subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
locke-lunarg61870c22020-06-09 14:51:50 -06001733 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pDepthStencilAttachment->attachment];
locke-lunarg37047832020-06-12 13:44:45 -06001734 bool depth_write = false, stencil_write = false;
1735
1736 // PHASE1 TODO: These validation should be in core_checks.
1737 if (!FormatIsStencilOnly(img_view_state->create_info.format) &&
1738 pPipe->graphicsPipelineCI.pDepthStencilState->depthTestEnable &&
1739 pPipe->graphicsPipelineCI.pDepthStencilState->depthWriteEnable &&
1740 IsImageLayoutDepthWritable(subpass.pDepthStencilAttachment->layout)) {
1741 depth_write = true;
1742 }
1743 // PHASE1 TODO: It needs to check if stencil is writable.
1744 // If failOp, passOp, or depthFailOp are not KEEP, and writeMask isn't 0, it's writable.
1745 // If depth test is disable, it's considered depth test passes, and then depthFailOp doesn't run.
1746 // PHASE1 TODO: These validation should be in core_checks.
1747 if (!FormatIsDepthOnly(img_view_state->create_info.format) &&
1748 pPipe->graphicsPipelineCI.pDepthStencilState->stencilTestEnable &&
1749 IsImageLayoutStencilWritable(subpass.pDepthStencilAttachment->layout)) {
1750 stencil_write = true;
1751 }
1752
1753 // PHASE1 TODO: Add EARLY stage detection based on ExecutionMode.
1754 if (depth_write) {
1755 HazardResult hazard =
1756 external_context_->DetectHazard(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE,
1757 kDepthStencilAttachmentRasterOrder, offset, extent, VK_IMAGE_ASPECT_DEPTH_BIT);
1758 if (hazard.hazard) {
1759 skip |= sync_state.LogError(img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
1760 "%s: Hazard %s for %s in %s, Subpass #%d, and depth part of pDepthStencilAttachment",
1761 func_name, string_SyncHazard(hazard.hazard),
1762 sync_state.report_data->FormatHandle(img_view_state->image_view).c_str(),
1763 sync_state.report_data->FormatHandle(cmd.commandBuffer).c_str(), cmd.activeSubpass);
1764 }
1765 }
1766 if (stencil_write) {
1767 HazardResult hazard =
1768 external_context_->DetectHazard(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE,
1769 kDepthStencilAttachmentRasterOrder, offset, extent, VK_IMAGE_ASPECT_STENCIL_BIT);
1770 if (hazard.hazard) {
1771 skip |= sync_state.LogError(img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
1772 "%s: Hazard %s for %s in %s, Subpass #%d, and stencil part of pDepthStencilAttachment",
1773 func_name, string_SyncHazard(hazard.hazard),
1774 sync_state.report_data->FormatHandle(img_view_state->image_view).c_str(),
1775 sync_state.report_data->FormatHandle(cmd.commandBuffer).c_str(), cmd.activeSubpass);
1776 }
locke-lunarg61870c22020-06-09 14:51:50 -06001777 }
1778 }
1779 return skip;
1780}
1781
locke-lunarg96dc9632020-06-10 17:22:18 -06001782void RenderPassAccessContext::RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area,
1783 const ResourceUsageTag &tag) {
1784 const auto *pPipe = GetCurrentPipelineFromCommandBuffer(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunarge9f1cdf2020-06-12 12:28:57 -06001785 if (!pPipe ||
1786 (pPipe->graphicsPipelineCI.pRasterizationState && pPipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable)) {
locke-lunarg96dc9632020-06-10 17:22:18 -06001787 return;
1788 }
1789 const auto &list = pPipe->fragmentShader_writable_output_location_list;
locke-lunarg61870c22020-06-09 14:51:50 -06001790 const auto &subpass = rp_state_->createInfo.pSubpasses[current_subpass_];
1791 VkExtent3D extent = CastTo3D(render_area.extent);
1792 VkOffset3D offset = CastTo3D(render_area.offset);
1793
locke-lunarg44f9bb12020-06-10 14:43:57 -06001794 // Subpass's inputAttachment has been done in RecordDispatchDrawDescriptorSet
locke-lunarg96dc9632020-06-10 17:22:18 -06001795 if (subpass.pColorAttachments && subpass.colorAttachmentCount && !list.empty()) {
1796 for (const auto location : list) {
1797 if (location >= subpass.colorAttachmentCount || subpass.pColorAttachments[location].attachment == VK_ATTACHMENT_UNUSED)
1798 continue;
1799 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pColorAttachments[location].attachment];
1800 external_context_->UpdateAccessState(img_view_state, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, offset,
1801 extent, 0, tag);
locke-lunarg61870c22020-06-09 14:51:50 -06001802 }
1803 }
locke-lunarg37047832020-06-12 13:44:45 -06001804
1805 // PHASE1 TODO: Add layout based read/vs. write selection.
1806 // PHASE1 TODO: Read operations for both depth and stencil are possible in the future.
1807 if (pPipe->graphicsPipelineCI.pDepthStencilState && subpass.pDepthStencilAttachment &&
1808 subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
locke-lunarg61870c22020-06-09 14:51:50 -06001809 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pDepthStencilAttachment->attachment];
locke-lunarg37047832020-06-12 13:44:45 -06001810 bool depth_write = false, stencil_write = false;
1811
1812 // PHASE1 TODO: These validation should be in core_checks.
1813 if (!FormatIsStencilOnly(img_view_state->create_info.format) &&
1814 pPipe->graphicsPipelineCI.pDepthStencilState->depthTestEnable &&
1815 pPipe->graphicsPipelineCI.pDepthStencilState->depthWriteEnable &&
1816 IsImageLayoutDepthWritable(subpass.pDepthStencilAttachment->layout)) {
1817 depth_write = true;
1818 }
1819 // PHASE1 TODO: It needs to check if stencil is writable.
1820 // If failOp, passOp, or depthFailOp are not KEEP, and writeMask isn't 0, it's writable.
1821 // If depth test is disable, it's considered depth test passes, and then depthFailOp doesn't run.
1822 // PHASE1 TODO: These validation should be in core_checks.
1823 if (!FormatIsDepthOnly(img_view_state->create_info.format) &&
1824 pPipe->graphicsPipelineCI.pDepthStencilState->stencilTestEnable &&
1825 IsImageLayoutStencilWritable(subpass.pDepthStencilAttachment->layout)) {
1826 stencil_write = true;
1827 }
1828
1829 // PHASE1 TODO: Add EARLY stage detection based on ExecutionMode.
1830 if (depth_write) {
1831 external_context_->UpdateAccessState(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, offset,
1832 extent, VK_IMAGE_ASPECT_DEPTH_BIT, tag);
1833 }
1834 if (stencil_write) {
1835 external_context_->UpdateAccessState(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, offset,
1836 extent, VK_IMAGE_ASPECT_STENCIL_BIT, tag);
1837 }
locke-lunarg61870c22020-06-09 14:51:50 -06001838 }
1839}
1840
John Zulauf1507ee42020-05-18 11:33:09 -06001841bool RenderPassAccessContext::ValidateNextSubpass(const SyncValidator &sync_state, const VkRect2D &render_area,
1842 const char *func_name) const {
John Zulaufaff20662020-06-01 14:07:58 -06001843 // PHASE1 TODO: Add Validate Preserve attachments
John Zulauf355e49b2020-04-24 15:11:15 -06001844 bool skip = false;
John Zulaufb027cdb2020-05-21 14:25:22 -06001845 skip |= CurrentContext().ValidateResolveOperations(sync_state, *rp_state_, render_area, attachment_views_, func_name,
1846 current_subpass_);
John Zulaufaff20662020-06-01 14:07:58 -06001847 skip |= CurrentContext().ValidateStoreOperation(sync_state, *rp_state_, render_area, current_subpass_, attachment_views_,
1848 func_name);
1849
John Zulauf355e49b2020-04-24 15:11:15 -06001850 const auto next_subpass = current_subpass_ + 1;
John Zulauf1507ee42020-05-18 11:33:09 -06001851 const auto &next_context = subpass_contexts_[next_subpass];
John Zulauf7635de32020-05-29 17:14:15 -06001852 skip |= next_context.ValidateLayoutTransitions(sync_state, *rp_state_, render_area, next_subpass, attachment_views_, func_name);
1853 skip |= next_context.ValidateLoadOperation(sync_state, *rp_state_, render_area, next_subpass, attachment_views_, func_name);
1854 return skip;
1855}
1856bool RenderPassAccessContext::ValidateEndRenderPass(const SyncValidator &sync_state, const VkRect2D &render_area,
1857 const char *func_name) const {
John Zulaufaff20662020-06-01 14:07:58 -06001858 // PHASE1 TODO: Validate Preserve
John Zulauf7635de32020-05-29 17:14:15 -06001859 bool skip = false;
1860 skip |= CurrentContext().ValidateResolveOperations(sync_state, *rp_state_, render_area, attachment_views_, func_name,
1861 current_subpass_);
John Zulaufaff20662020-06-01 14:07:58 -06001862 skip |= CurrentContext().ValidateStoreOperation(sync_state, *rp_state_, render_area, current_subpass_, attachment_views_,
1863 func_name);
John Zulauf7635de32020-05-29 17:14:15 -06001864 skip |= ValidateFinalSubpassLayoutTransitions(sync_state, render_area, func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06001865 return skip;
1866}
1867
John Zulauf7635de32020-05-29 17:14:15 -06001868AccessContext *RenderPassAccessContext::CreateStoreResolveProxy(const VkRect2D &render_area) const {
1869 return CreateStoreResolveProxyContext(CurrentContext(), *rp_state_, current_subpass_, render_area, attachment_views_);
1870}
1871
1872bool RenderPassAccessContext::ValidateFinalSubpassLayoutTransitions(const SyncValidator &sync_state, const VkRect2D &render_area,
1873 const char *func_name) const {
John Zulauf355e49b2020-04-24 15:11:15 -06001874 bool skip = false;
1875
John Zulauf7635de32020-05-29 17:14:15 -06001876 // As validation methods are const and precede the record/update phase, for any transitions from the current (last)
1877 // subpass, we have to validate them against a copy of the current AccessContext, with resolve operations applied.
1878 // Note: we could be more efficient by tracking whether or not we actually *have* any changes (e.g. attachment resolve)
1879 // to apply and only copy then, if this proves a hot spot.
1880 std::unique_ptr<AccessContext> proxy_for_current;
1881
John Zulauf355e49b2020-04-24 15:11:15 -06001882 // Validate the "finalLayout" transitions to external
1883 // Get them from where they're hiding in the extra entry.
1884 const auto &final_transitions = rp_state_->subpass_transitions.back();
1885 for (const auto &transition : final_transitions) {
1886 const auto &attach_view = attachment_views_[transition.attachment];
1887 const auto &trackback = subpass_contexts_[transition.prev_pass].GetDstExternalTrackBack();
1888 assert(trackback.context); // Transitions are given implicit transitions if the StateTracker is working correctly
John Zulauf7635de32020-05-29 17:14:15 -06001889 auto *context = trackback.context;
1890
1891 if (transition.prev_pass == current_subpass_) {
1892 if (!proxy_for_current) {
1893 // We haven't recorded the resolve operations for the current_subpass, so we need to copy current and update it *as if* they had been recorded
1894 proxy_for_current.reset(CreateStoreResolveProxy(render_area));
1895 }
1896 context = proxy_for_current.get();
1897 }
1898
1899 auto hazard = context->DetectImageBarrierHazard(
John Zulauf355e49b2020-04-24 15:11:15 -06001900 *attach_view->image_state, trackback.barrier.src_exec_scope, trackback.barrier.src_access_scope,
1901 attach_view->normalized_subresource_range, AccessContext::DetectOptions::kDetectPrevious);
1902 if (hazard.hazard) {
1903 skip |= sync_state.LogError(rp_state_->renderPass, string_SyncHazardVUID(hazard.hazard),
1904 "%s: Hazard %s with last use subpass %" PRIu32 " for attachment %" PRIu32
John Zulauf1dae9192020-06-16 15:46:44 -06001905 " final image layout transition. Prior access %s.",
1906 func_name, string_SyncHazard(hazard.hazard), transition.prev_pass, transition.attachment,
1907 string_UsageTag(hazard.tag).c_str());
John Zulauf355e49b2020-04-24 15:11:15 -06001908 }
1909 }
1910 return skip;
1911}
1912
1913void RenderPassAccessContext::RecordLayoutTransitions(const ResourceUsageTag &tag) {
1914 // Add layout transitions...
1915 const auto &transitions = rp_state_->subpass_transitions[current_subpass_];
1916 auto &subpass_context = subpass_contexts_[current_subpass_];
John Zulaufc9201222020-05-13 15:13:03 -06001917 std::set<const IMAGE_VIEW_STATE *> view_seen;
John Zulauf355e49b2020-04-24 15:11:15 -06001918 for (const auto &transition : transitions) {
1919 const auto attachment_view = attachment_views_[transition.attachment];
1920 if (!attachment_view) continue;
1921 const auto image = attachment_view->image_state.get();
1922 if (!image) continue;
1923
1924 const auto *barrier = subpass_context.GetTrackBackFromSubpass(transition.prev_pass);
John Zulaufc9201222020-05-13 15:13:03 -06001925 auto insert_pair = view_seen.insert(attachment_view);
1926 if (insert_pair.second) {
1927 // We haven't recorded the transition yet, so treat this as a normal barrier with transition.
1928 subpass_context.ApplyImageBarrier(*image, barrier->barrier, attachment_view->normalized_subresource_range, true, tag);
1929
1930 } else {
1931 // We've recorded the transition, but we need to add on the additional dest barriers, and re-recording the transition
1932 // would clear out the prior barrier flags, so apply this as a *non* transition barrier
1933 auto barrier_to_transition = barrier->barrier;
1934 barrier_to_transition.src_access_scope |= SYNC_IMAGE_LAYOUT_TRANSITION_BIT;
1935 subpass_context.ApplyImageBarrier(*image, barrier_to_transition, attachment_view->normalized_subresource_range, false, tag);
1936 }
John Zulauf355e49b2020-04-24 15:11:15 -06001937 }
1938}
1939
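// Record the load operations for attachments first used in the current subpass, splitting depth and stencil aspects
// so each can use its own loadOp/stencilLoadOp.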
John Zulauf1507ee42020-05-18 11:33:09 -06001940void RenderPassAccessContext::RecordLoadOperations(const VkRect2D &render_area, const ResourceUsageTag &tag) {
1941 const auto *attachment_ci = rp_state_->createInfo.pAttachments;
1942 auto &subpass_context = subpass_contexts_[current_subpass_];
1943 VkExtent3D extent = CastTo3D(render_area.extent);
1944 VkOffset3D offset = CastTo3D(render_area.offset);
1945
1946 for (uint32_t i = 0; i < rp_state_->createInfo.attachmentCount; i++) {
1947 if (rp_state_->attachment_first_subpass[i] == current_subpass_) {
1948 if (attachment_views_[i] == nullptr) continue; // UNUSED
1949 const auto &view = *attachment_views_[i];
1950 const IMAGE_STATE *image = view.image_state.get();
1951 if (image == nullptr) continue;
1952
1953 const auto &ci = attachment_ci[i];
1954 const bool has_depth = FormatHasDepth(ci.format);
John Zulaufb027cdb2020-05-21 14:25:22 -06001955 const bool has_stencil = FormatHasStencil(ci.format);
John Zulauf1507ee42020-05-18 11:33:09 -06001956 const bool is_color = !(has_depth || has_stencil);
1957
1958 if (is_color) {
1959 subpass_context.UpdateAccessState(*image, ColorLoadUsage(ci.loadOp), view.normalized_subresource_range, offset,
1960 extent, tag);
1961 } else {
1962 auto update_range = view.normalized_subresource_range;
1963 if (has_depth) {
1964 update_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
1965 subpass_context.UpdateAccessState(*image, DepthStencilLoadUsage(ci.loadOp), update_range, offset, extent, tag);
1966 }
1967 if (has_stencil) {
1968 update_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
1969 subpass_context.UpdateAccessState(*image, DepthStencilLoadUsage(ci.stencilLoadOp), update_range, offset, extent,
1970 tag);
1971 }
1972 }
1973 }
1974 }
1975}
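
// Note on the load-op mapping above -- an illustrative sketch of the intent, not a restatement of the helpers'
// exact return values (ColorLoadUsage()/DepthStencilLoadUsage() are defined elsewhere in this file):
//     VK_ATTACHMENT_LOAD_OP_LOAD       -> recorded as an attachment *read*
//     VK_ATTACHMENT_LOAD_OP_CLEAR      -> recorded as an attachment *write*
//     VK_ATTACHMENT_LOAD_OP_DONT_CARE  -> recorded as an attachment *write*
// so any later access to the same attachment range is ordered against the load operation like any other access.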
1976
John Zulauf355e49b2020-04-24 15:11:15 -06001977void RenderPassAccessContext::RecordBeginRenderPass(const SyncValidator &state, const CMD_BUFFER_STATE &cb_state,
1978 VkQueueFlags queue_flags, const ResourceUsageTag &tag) {
1979 current_subpass_ = 0;
locke-lunargaecf2152020-05-12 17:15:41 -06001980 rp_state_ = cb_state.activeRenderPass.get();
John Zulauf355e49b2020-04-24 15:11:15 -06001981 subpass_contexts_.reserve(rp_state_->createInfo.subpassCount);
 1982 // Add contexts for all subpasses here so that they exist during next subpass validation
1983 for (uint32_t pass = 0; pass < rp_state_->createInfo.subpassCount; pass++) {
1984 subpass_contexts_.emplace_back(pass, queue_flags, rp_state_->subpass_dependencies, subpass_contexts_, external_context_);
1985 }
1986 attachment_views_ = state.GetCurrentAttachmentViews(cb_state);
1987
1988 RecordLayoutTransitions(tag);
John Zulauf1507ee42020-05-18 11:33:09 -06001989 RecordLoadOperations(cb_state.activeRenderPassBeginInfo.renderArea, tag);
John Zulauf355e49b2020-04-24 15:11:15 -06001990}
John Zulauf1507ee42020-05-18 11:33:09 -06001991
1992void RenderPassAccessContext::RecordNextSubpass(const VkRect2D &render_area, const ResourceUsageTag &tag) {
John Zulauf7635de32020-05-29 17:14:15 -06001993 // Resolves are against *prior* subpass context and thus *before* the subpass increment
1994 CurrentContext().UpdateAttachmentResolveAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulaufaff20662020-06-01 14:07:58 -06001995 CurrentContext().UpdateAttachmentStoreAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulauf7635de32020-05-29 17:14:15 -06001996
John Zulauf355e49b2020-04-24 15:11:15 -06001997 current_subpass_++;
1998 assert(current_subpass_ < subpass_contexts_.size());
1999 RecordLayoutTransitions(tag);
John Zulauf1507ee42020-05-18 11:33:09 -06002000 RecordLoadOperations(render_area, tag);
John Zulauf355e49b2020-04-24 15:11:15 -06002001}
2002
John Zulauf7635de32020-05-29 17:14:15 -06002003void RenderPassAccessContext::RecordEndRenderPass(const VkRect2D &render_area, const ResourceUsageTag &tag) {
John Zulaufaff20662020-06-01 14:07:58 -06002004 // Add the resolve and store accesses
John Zulauf7635de32020-05-29 17:14:15 -06002005 CurrentContext().UpdateAttachmentResolveAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulaufaff20662020-06-01 14:07:58 -06002006 CurrentContext().UpdateAttachmentStoreAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulauf7635de32020-05-29 17:14:15 -06002007
John Zulauf355e49b2020-04-24 15:11:15 -06002008 // Export the accesses from the renderpass...
2009 external_context_->ResolveChildContexts(subpass_contexts_);
2010
2011 // Add the "finalLayout" transitions to external
 2012 // Get them from the extra entry where we're hiding them.
2013 const auto &final_transitions = rp_state_->subpass_transitions.back();
2014 for (const auto &transition : final_transitions) {
2015 const auto &attachment = attachment_views_[transition.attachment];
2016 const auto &last_trackback = subpass_contexts_[transition.prev_pass].GetDstExternalTrackBack();
2017 assert(external_context_ == last_trackback.context);
2018 external_context_->ApplyImageBarrier(*attachment->image_state, last_trackback.barrier,
2019 attachment->normalized_subresource_range, true, tag);
2020 }
2021}
2022
John Zulauf3d84f1b2020-03-09 13:33:25 -06002023SyncBarrier::SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &barrier) {
2024 const auto src_stage_mask = ExpandPipelineStages(queue_flags, barrier.srcStageMask);
2025 src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
2026 src_access_scope = SyncStageAccess::AccessScope(src_stage_mask, barrier.srcAccessMask);
2027 const auto dst_stage_mask = ExpandPipelineStages(queue_flags, barrier.dstStageMask);
2028 dst_exec_scope = WithLaterPipelineStages(dst_stage_mask);
2029 dst_access_scope = SyncStageAccess::AccessScope(dst_stage_mask, barrier.dstAccessMask);
2030}
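
// Usage sketch (illustrative only; the dependency below is hypothetical, not taken from any real render pass):
//
//     VkSubpassDependency2 dep = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2};
//     dep.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
//     dep.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
//     dep.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
//     dep.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
//     SyncBarrier barrier(VK_QUEUE_GRAPHICS_BIT, dep);
//
// src_exec_scope then holds COLOR_ATTACHMENT_OUTPUT plus the logically *earlier* graphics stages, dst_exec_scope
// holds FRAGMENT_SHADER plus the logically *later* stages, and each access scope is the access mask restricted to
// the stages it can legally pair with.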
2031
2032void ResourceAccessState::ApplyBarrier(const SyncBarrier &barrier) {
2033 ApplyExecutionBarrier(barrier.src_exec_scope, barrier.dst_exec_scope);
2034 ApplyMemoryAccessBarrier(barrier.src_exec_scope, barrier.src_access_scope, barrier.dst_exec_scope, barrier.dst_access_scope);
2035}
2036
John Zulauf9cb530d2019-09-30 14:14:10 -06002037HazardResult ResourceAccessState::DetectHazard(SyncStageAccessIndex usage_index) const {
2038 HazardResult hazard;
2039 auto usage = FlagBit(usage_index);
2040 if (IsRead(usage)) {
John Zulaufc9201222020-05-13 15:13:03 -06002041 if (last_write && IsWriteHazard(usage)) {
John Zulauf9cb530d2019-09-30 14:14:10 -06002042 hazard.Set(READ_AFTER_WRITE, write_tag);
2043 }
2044 } else {
2045 // Assume write
2046 // TODO determine what to do with READ-WRITE usage states if any
2047 // Write-After-Write check -- if we have a previous write to test against
2048 if (last_write && IsWriteHazard(usage)) {
2049 hazard.Set(WRITE_AFTER_WRITE, write_tag);
2050 } else {
John Zulauf69133422020-05-20 14:55:53 -06002051 // Look for casus belli for WAR
John Zulauf9cb530d2019-09-30 14:14:10 -06002052 const auto usage_stage = PipelineStageBit(usage_index);
2053 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2054 if (IsReadHazard(usage_stage, last_reads[read_index])) {
2055 hazard.Set(WRITE_AFTER_READ, last_reads[read_index].tag);
2056 break;
2057 }
2058 }
2059 }
2060 }
2061 return hazard;
2062}
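
// Illustrative outcomes of the rules above (assumed scenarios, not exhaustive):
//     last access: TRANSFER write, no barriers applied
//         DetectHazard(SYNC_TRANSFER_TRANSFER_READ)  -> READ_AFTER_WRITE
//         DetectHazard(SYNC_TRANSFER_TRANSFER_WRITE) -> WRITE_AFTER_WRITE
//     last access: VERTEX_SHADER read, no barriers applied
//         DetectHazard(SYNC_TRANSFER_TRANSFER_WRITE) -> WRITE_AFTER_READ
// Roughly speaking, a barrier whose second scopes cover the new usage clears the corresponding
// IsWriteHazard/IsReadHazard test, so the same sequence with an intervening barrier reports nothing.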
2063
John Zulauf69133422020-05-20 14:55:53 -06002064HazardResult ResourceAccessState::DetectHazard(SyncStageAccessIndex usage_index, const SyncOrderingBarrier &ordering) const {
2065 // The ordering guarantees act as barriers to the last accesses, independent of synchronization operations
2066 HazardResult hazard;
2067 const auto usage = FlagBit(usage_index);
2068 const bool write_is_ordered = (last_write & ordering.access_scope) == last_write; // Is true if no write, and that's good.
2069 if (IsRead(usage)) {
2070 if (!write_is_ordered && IsWriteHazard(usage)) {
2071 hazard.Set(READ_AFTER_WRITE, write_tag);
2072 }
2073 } else {
2074 if (!write_is_ordered && IsWriteHazard(usage)) {
2075 hazard.Set(WRITE_AFTER_WRITE, write_tag);
2076 } else {
2077 const auto usage_stage = PipelineStageBit(usage_index);
2078 const auto unordered_reads = last_read_stages & ~ordering.exec_scope;
2079 if (unordered_reads) {
2080 // Look for any WAR hazards outside the ordered set of stages
2081 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2082 if (last_reads[read_index].stage & unordered_reads) {
2083 if (IsReadHazard(usage_stage, last_reads[read_index])) {
2084 hazard.Set(WRITE_AFTER_READ, last_reads[read_index].tag);
2085 break;
2086 }
2087 }
2088 }
2089 }
2090 }
2091 }
2092 return hazard;
2093}
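
// The practical effect (illustrative): within a subpass, attachment accesses are raster-ordered, so calling this
// overload with an `ordering` whose access_scope covers color-attachment writes lets a second color-attachment
// write to the same range pass the write_is_ordered test with no WRITE_AFTER_WRITE reported, while an unrelated
// write (e.g. a transfer) to the same memory would still be flagged by the plain DetectHazard() path.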
2094
John Zulauf2f952d22020-02-10 11:34:51 -07002095// Asynchronous Hazards occur between subpasses with no connection through the DAG
John Zulauf3d84f1b2020-03-09 13:33:25 -06002096HazardResult ResourceAccessState::DetectAsyncHazard(SyncStageAccessIndex usage_index) const {
John Zulauf2f952d22020-02-10 11:34:51 -07002097 HazardResult hazard;
2098 auto usage = FlagBit(usage_index);
2099 if (IsRead(usage)) {
2100 if (last_write != 0) {
2101 hazard.Set(READ_RACING_WRITE, write_tag);
2102 }
2103 } else {
2104 if (last_write != 0) {
2105 hazard.Set(WRITE_RACING_WRITE, write_tag);
2106 } else if (last_read_count > 0) {
2107 hazard.Set(WRITE_RACING_READ, last_reads[0].tag);
2108 }
2109 }
2110 return hazard;
2111}
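
// Illustrative scenario: subpass 1 samples an image that subpass 0 renders to, but the render pass declares no
// dependency connecting the two. Validating subpass 1 against subpass 0's *async* context lands here and reports
// READ_RACING_WRITE regardless of any barriers recorded, because there is no dependency chain to honor.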
2112
John Zulauf36bcf6a2020-02-03 15:12:52 -07002113HazardResult ResourceAccessState::DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
2114 SyncStageAccessFlags src_access_scope) const {
John Zulauf0cb5be22020-01-23 12:18:22 -07002115 // Only supporting image layout transitions for now
2116 assert(usage_index == SyncStageAccessIndex::SYNC_IMAGE_LAYOUT_TRANSITION);
2117 HazardResult hazard;
2118 if (last_write) {
2119 // If the previous write is *not* in the 1st access scope
2120 // *AND* the current barrier is not in the dependency chain
 2121 // *AND* there is no prior memory barrier for the previous write in the dependency chain
2122 // then the barrier access is unsafe (R/W after W)
John Zulauf36bcf6a2020-02-03 15:12:52 -07002123 if (((last_write & src_access_scope) == 0) && ((src_exec_scope & write_dependency_chain) == 0) && (write_barriers == 0)) {
John Zulauf0cb5be22020-01-23 12:18:22 -07002124 // TODO: Do we need a different hazard name for this?
2125 hazard.Set(WRITE_AFTER_WRITE, write_tag);
2126 }
John Zulauf355e49b2020-04-24 15:11:15 -06002127 }
2128 if (!hazard.hazard) {
2129 // Look at the reads if any
John Zulauf0cb5be22020-01-23 12:18:22 -07002130 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
John Zulauf36bcf6a2020-02-03 15:12:52 -07002131 const auto &read_access = last_reads[read_index];
 2132 // If the read stage is not in the src sync scope
2133 // *AND* not execution chained with an existing sync barrier (that's the or)
2134 // then the barrier access is unsafe (R/W after R)
2135 if ((src_exec_scope & (read_access.stage | read_access.barriers)) == 0) {
2136 hazard.Set(WRITE_AFTER_READ, read_access.tag);
John Zulauf0cb5be22020-01-23 12:18:22 -07002137 break;
2138 }
2139 }
2140 }
2141 return hazard;
2142}
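
// Illustrative trip-wire: an image last written by a fragment shader, then transitioned by a barrier whose
// srcStageMask is TOP_OF_PIPE and srcAccessMask is 0, fails all three tests above (the write is not in the source
// access scope, the barrier is not execution-chained to the write, and no prior barrier protects the write), so
// the layout transition is reported as WRITE_AFTER_WRITE against the prior write.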
2143
John Zulauf5f13a792020-03-10 07:31:21 -06002144// The logic behind resolves is the same as update: we assume that earlier hazards have been reported, and that no
 2145// transitive hazard can exist with a hazard between the earlier operations. Yes, an early hazard can mask that another
2146// exists, but if you fix *that* hazard it either fixes or unmasks the subsequent ones.
2147void ResourceAccessState::Resolve(const ResourceAccessState &other) {
2148 if (write_tag.IsBefore(other.write_tag)) {
 2149 // If this is a later write, we've reported any existing hazard, and we can just overwrite as the more recent operation
2150 *this = other;
2151 } else if (!other.write_tag.IsBefore(write_tag)) {
 2152 // This is the *equals* case for write operations, we merge the write barriers and the read state (but without the
2153 // dependency chaining logic or any stage expansion)
2154 write_barriers |= other.write_barriers;
2155
 2156 // Merge the read states
2157 for (uint32_t other_read_index = 0; other_read_index < other.last_read_count; other_read_index++) {
2158 auto &other_read = other.last_reads[other_read_index];
2159 if (last_read_stages & other_read.stage) {
2160 // Merge in the barriers for read stages that exist in *both* this and other
2161 // TODO: This is N^2 with stages... perhaps the ReadStates should be by stage index.
2162 for (uint32_t my_read_index = 0; my_read_index < last_read_count; my_read_index++) {
2163 auto &my_read = last_reads[my_read_index];
2164 if (other_read.stage == my_read.stage) {
2165 if (my_read.tag.IsBefore(other_read.tag)) {
2166 my_read.tag = other_read.tag;
2167 }
2168 my_read.barriers |= other_read.barriers;
2169 break;
2170 }
2171 }
2172 } else {
2173 // The other read stage doesn't exist in this, so add it.
2174 last_reads[last_read_count] = other_read;
2175 last_read_count++;
2176 last_read_stages |= other_read.stage;
2177 }
2178 }
 2179 } // the else clause would be that the other write is before this write... in which case we supersede the other state and ignore
2180 // it.
2181}
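
// Merge sketch (illustrative): resolving two states that carry the *same* write tag keeps the single write but
// unions its protection, e.g.
//     this:   write W; FRAGMENT_SHADER read with barriers B0
//     other:  write W; FRAGMENT_SHADER read with barriers B1; COMPUTE_SHADER read
//     result: write_barriers unioned; the FRAGMENT read keeps the newer tag and carries B0 | B1; the COMPUTE read
//             is appended as a new ReadState entry.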
2182
John Zulauf9cb530d2019-09-30 14:14:10 -06002183void ResourceAccessState::Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag) {
 2184 // Move this logic into the ResourceStateTracker as methods thereof (or we'll repeat it for every flavor of resource...)
2185 const auto usage_bit = FlagBit(usage_index);
2186 if (IsRead(usage_index)) {
 2187 // Multiple outstanding reads may be of interest and do dependency chains independently
2188 // However, for purposes of barrier tracking, only one read per pipeline stage matters
2189 const auto usage_stage = PipelineStageBit(usage_index);
2190 if (usage_stage & last_read_stages) {
2191 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2192 ReadState &access = last_reads[read_index];
2193 if (access.stage == usage_stage) {
2194 access.barriers = 0;
2195 access.tag = tag;
2196 break;
2197 }
2198 }
2199 } else {
2200 // We don't have this stage in the list yet...
2201 assert(last_read_count < last_reads.size());
2202 ReadState &access = last_reads[last_read_count++];
2203 access.stage = usage_stage;
2204 access.barriers = 0;
2205 access.tag = tag;
2206 last_read_stages |= usage_stage;
2207 }
2208 } else {
2209 // Assume write
2210 // TODO determine what to do with READ-WRITE operations if any
2211 // Clobber last read and both sets of barriers... because all we have is DANGER, DANGER, WILL ROBINSON!!!
2212 // if the last_reads/last_write were unsafe, we've reported them,
2213 // in either case the prior access is irrelevant, we can overwrite them as *this* write is now after them
2214 last_read_count = 0;
2215 last_read_stages = 0;
2216
2217 write_barriers = 0;
2218 write_dependency_chain = 0;
2219 write_tag = tag;
2220 last_write = usage_bit;
2221 }
2222}
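
// State-transition sketch (illustrative) for a single resource range:
//     Update(VERTEX_SHADER read)    -> new ReadState entry; reads recorded at other stages are kept
//     Update(FRAGMENT_SHADER read)  -> second ReadState entry; last_read_stages now carries both stage bits
//     Update(TRANSFER write)        -> all reads and barriers cleared, write_tag/last_write replaced -- any hazard
//                                      with the prior state was already reported at validation time.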
John Zulauf5f13a792020-03-10 07:31:21 -06002223
John Zulauf9cb530d2019-09-30 14:14:10 -06002224void ResourceAccessState::ApplyExecutionBarrier(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask) {
2225 // Execution Barriers only protect read operations
2226 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2227 ReadState &access = last_reads[read_index];
2228 // The | implements the "dependency chain" logic for this access, as the barriers field stores the second sync scope
2229 if (srcStageMask & (access.stage | access.barriers)) {
2230 access.barriers |= dstStageMask;
2231 }
2232 }
2233 if (write_dependency_chain & srcStageMask) write_dependency_chain |= dstStageMask;
2234}
2235
John Zulauf36bcf6a2020-02-03 15:12:52 -07002236void ResourceAccessState::ApplyMemoryAccessBarrier(VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
2237 VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags dst_access_scope) {
John Zulauf9cb530d2019-09-30 14:14:10 -06002238 // Assuming we've applied the execution side of this barrier, we update just the write
2239 // The || implements the "dependency chain" logic for this barrier
John Zulauf36bcf6a2020-02-03 15:12:52 -07002240 if ((src_access_scope & last_write) || (write_dependency_chain & src_exec_scope)) {
2241 write_barriers |= dst_access_scope;
2242 write_dependency_chain |= dst_exec_scope;
John Zulauf9cb530d2019-09-30 14:14:10 -06002243 }
2244}
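
// Chaining sketch (illustrative), starting from a TRANSFER write:
//     barrier A: src = TRANSFER / TRANSFER_WRITE, dst = COMPUTE_SHADER / SHADER_READ
//         -> src_access_scope covers last_write, so write_barriers gains the compute read and
//            write_dependency_chain gains COMPUTE_SHADER
//     barrier B: src = COMPUTE_SHADER (no source access flags), dst = FRAGMENT_SHADER / SHADER_READ
//         -> (write_dependency_chain & src_exec_scope) is non-zero, so the barriers and chain extend to the
//            fragment read even though barrier B never names the original write access.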
2245
John Zulaufd1f85d42020-04-15 12:23:15 -06002246void SyncValidator::ResetCommandBufferCallback(VkCommandBuffer command_buffer) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002247 auto *access_context = GetAccessContextNoInsert(command_buffer);
2248 if (access_context) {
2249 access_context->Reset();
John Zulauf9cb530d2019-09-30 14:14:10 -06002250 }
2251}
2252
John Zulaufd1f85d42020-04-15 12:23:15 -06002253void SyncValidator::FreeCommandBufferCallback(VkCommandBuffer command_buffer) {
2254 auto access_found = cb_access_state.find(command_buffer);
2255 if (access_found != cb_access_state.end()) {
2256 access_found->second->Reset();
2257 cb_access_state.erase(access_found);
2258 }
2259}
2260
John Zulauf540266b2020-04-06 18:54:53 -06002261void SyncValidator::ApplyGlobalBarriers(AccessContext *context, VkPipelineStageFlags srcStageMask,
John Zulauf36bcf6a2020-02-03 15:12:52 -07002262 VkPipelineStageFlags dstStageMask, SyncStageAccessFlags src_access_scope,
2263 SyncStageAccessFlags dst_access_scope, uint32_t memoryBarrierCount,
John Zulauf9cb530d2019-09-30 14:14:10 -06002264 const VkMemoryBarrier *pMemoryBarriers) {
2265 // TODO: Implement this better (maybe some delayed/on-demand integration).
John Zulauf36bcf6a2020-02-03 15:12:52 -07002266 ApplyGlobalBarrierFunctor barriers_functor(srcStageMask, dstStageMask, src_access_scope, dst_access_scope, memoryBarrierCount,
John Zulauf9cb530d2019-09-30 14:14:10 -06002267 pMemoryBarriers);
John Zulauf540266b2020-04-06 18:54:53 -06002268 context->ApplyGlobalBarriers(barriers_functor);
John Zulauf9cb530d2019-09-30 14:14:10 -06002269}
2270
John Zulauf540266b2020-04-06 18:54:53 -06002271void SyncValidator::ApplyBufferBarriers(AccessContext *context, VkPipelineStageFlags src_exec_scope,
John Zulauf36bcf6a2020-02-03 15:12:52 -07002272 SyncStageAccessFlags src_stage_accesses, VkPipelineStageFlags dst_exec_scope,
2273 SyncStageAccessFlags dst_stage_accesses, uint32_t barrier_count,
John Zulauf9cb530d2019-09-30 14:14:10 -06002274 const VkBufferMemoryBarrier *barriers) {
John Zulauf9cb530d2019-09-30 14:14:10 -06002275 for (uint32_t index = 0; index < barrier_count; index++) {
locke-lunarg3c038002020-04-30 23:08:08 -06002276 auto barrier = barriers[index];
John Zulauf9cb530d2019-09-30 14:14:10 -06002277 const auto *buffer = Get<BUFFER_STATE>(barrier.buffer);
2278 if (!buffer) continue;
locke-lunarg3c038002020-04-30 23:08:08 -06002279 barrier.size = GetRealWholeSize(barrier.offset, barrier.size, buffer->createInfo.size);
John Zulauf16adfc92020-04-08 10:28:33 -06002280 ResourceAccessRange range = MakeRange(barrier);
John Zulauf540266b2020-04-06 18:54:53 -06002281 const auto src_access_scope = AccessScope(src_stage_accesses, barrier.srcAccessMask);
2282 const auto dst_access_scope = AccessScope(dst_stage_accesses, barrier.dstAccessMask);
2283 const ApplyMemoryAccessBarrierFunctor update_action(src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope);
2284 context->UpdateMemoryAccess(*buffer, range, update_action);
John Zulauf9cb530d2019-09-30 14:14:10 -06002285 }
2286}
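
// Range clamping note (illustrative, hypothetical values): a VkBufferMemoryBarrier with offset = 256 and
// size = VK_WHOLE_SIZE against a 1024-byte buffer is clamped by GetRealWholeSize() to the 768 bytes that actually
// exist, and only the access state covering [256, 1024) receives the translated src/dst scopes.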
2287
John Zulauf540266b2020-04-06 18:54:53 -06002288void SyncValidator::ApplyImageBarriers(AccessContext *context, VkPipelineStageFlags src_exec_scope,
2289 SyncStageAccessFlags src_stage_accesses, VkPipelineStageFlags dst_exec_scope,
2290 SyncStageAccessFlags dst_stage_accesses, uint32_t barrier_count,
John Zulauf355e49b2020-04-24 15:11:15 -06002291 const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag) {
John Zulauf5c5e88d2019-12-26 11:22:02 -07002292 for (uint32_t index = 0; index < barrier_count; index++) {
2293 const auto &barrier = barriers[index];
2294 const auto *image = Get<IMAGE_STATE>(barrier.image);
2295 if (!image) continue;
John Zulauf540266b2020-04-06 18:54:53 -06002296 auto subresource_range = NormalizeSubresourceRange(image->createInfo, barrier.subresourceRange);
John Zulauf355e49b2020-04-24 15:11:15 -06002297 bool layout_transition = barrier.oldLayout != barrier.newLayout;
2298 const auto src_access_scope = AccessScope(src_stage_accesses, barrier.srcAccessMask);
2299 const auto dst_access_scope = AccessScope(dst_stage_accesses, barrier.dstAccessMask);
2300 context->ApplyImageBarrier(*image, src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope, subresource_range,
2301 layout_transition, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002302 }
John Zulauf9cb530d2019-09-30 14:14:10 -06002303}
2304
2305bool SyncValidator::PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2306 uint32_t regionCount, const VkBufferCopy *pRegions) const {
2307 bool skip = false;
John Zulauf3d84f1b2020-03-09 13:33:25 -06002308 const auto *cb_context = GetAccessContext(commandBuffer);
2309 assert(cb_context);
2310 if (!cb_context) return skip;
2311 const auto *context = cb_context->GetCurrentAccessContext();
John Zulauf9cb530d2019-09-30 14:14:10 -06002312
John Zulauf3d84f1b2020-03-09 13:33:25 -06002313 // If we have no previous accesses, we have no hazards
John Zulauf3d84f1b2020-03-09 13:33:25 -06002314 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002315 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002316
2317 for (uint32_t region = 0; region < regionCount; region++) {
2318 const auto &copy_region = pRegions[region];
John Zulauf16adfc92020-04-08 10:28:33 -06002319 if (src_buffer) {
locke-lunargff255f92020-05-13 18:53:52 -06002320 ResourceAccessRange src_range = MakeRange(
2321 copy_region.srcOffset, GetRealWholeSize(copy_region.srcOffset, copy_region.size, src_buffer->createInfo.size));
John Zulauf16adfc92020-04-08 10:28:33 -06002322 auto hazard = context->DetectHazard(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002323 if (hazard.hazard) {
2324 // TODO -- add tag information to log msg when useful.
locke-lunarga0003652020-03-10 11:38:51 -06002325 skip |= LogError(srcBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002326 "vkCmdCopyBuffer: Hazard %s for srcBuffer %s, region %" PRIu32 ". Prior access %s.",
2327 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcBuffer).c_str(), region,
2328 string_UsageTag(hazard.tag).c_str());
John Zulauf9cb530d2019-09-30 14:14:10 -06002329 }
John Zulauf9cb530d2019-09-30 14:14:10 -06002330 }
John Zulauf16adfc92020-04-08 10:28:33 -06002331 if (dst_buffer && !skip) {
locke-lunargff255f92020-05-13 18:53:52 -06002332 ResourceAccessRange dst_range = MakeRange(
2333 copy_region.dstOffset, GetRealWholeSize(copy_region.dstOffset, copy_region.size, dst_buffer->createInfo.size));
John Zulauf355e49b2020-04-24 15:11:15 -06002334 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002335 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002336 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002337 "vkCmdCopyBuffer: Hazard %s for dstBuffer %s, region %" PRIu32 ". Prior access %s.",
2338 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str(), region,
2339 string_UsageTag(hazard.tag).c_str());
John Zulauf3d84f1b2020-03-09 13:33:25 -06002340 }
2341 }
2342 if (skip) break;
John Zulauf9cb530d2019-09-30 14:14:10 -06002343 }
2344 return skip;
2345}
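
// A hazard this validation is meant to catch (illustrative):
//     vkCmdCopyBuffer(cb, bufA, bufB, 1, &region);   // writes bufB
//     vkCmdCopyBuffer(cb, bufB, bufC, 1, &region);   // reads bufB
// recorded with no intervening pipeline barrier reports READ_AFTER_WRITE on bufB, since the second copy's
// transfer read is not protected against the first copy's transfer write.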
2346
2347void SyncValidator::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2348 uint32_t regionCount, const VkBufferCopy *pRegions) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002349 auto *cb_context = GetAccessContext(commandBuffer);
2350 assert(cb_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002351 const auto tag = cb_context->NextCommandTag(CMD_COPYBUFFER);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002352 auto *context = cb_context->GetCurrentAccessContext();
2353
John Zulauf9cb530d2019-09-30 14:14:10 -06002354 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
John Zulauf9cb530d2019-09-30 14:14:10 -06002355 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
John Zulauf9cb530d2019-09-30 14:14:10 -06002356
2357 for (uint32_t region = 0; region < regionCount; region++) {
2358 const auto &copy_region = pRegions[region];
John Zulauf16adfc92020-04-08 10:28:33 -06002359 if (src_buffer) {
locke-lunargff255f92020-05-13 18:53:52 -06002360 ResourceAccessRange src_range = MakeRange(
2361 copy_region.srcOffset, GetRealWholeSize(copy_region.srcOffset, copy_region.size, src_buffer->createInfo.size));
John Zulauf16adfc92020-04-08 10:28:33 -06002362 context->UpdateAccessState(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002363 }
John Zulauf16adfc92020-04-08 10:28:33 -06002364 if (dst_buffer) {
locke-lunargff255f92020-05-13 18:53:52 -06002365 ResourceAccessRange dst_range = MakeRange(
2366 copy_region.dstOffset, GetRealWholeSize(copy_region.dstOffset, copy_region.size, dst_buffer->createInfo.size));
John Zulauf16adfc92020-04-08 10:28:33 -06002367 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range, tag);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002368 }
2369 }
2370}
2371
2372bool SyncValidator::PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2373 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2374 const VkImageCopy *pRegions) const {
2375 bool skip = false;
John Zulauf3d84f1b2020-03-09 13:33:25 -06002376 const auto *cb_access_context = GetAccessContext(commandBuffer);
2377 assert(cb_access_context);
2378 if (!cb_access_context) return skip;
John Zulauf5c5e88d2019-12-26 11:22:02 -07002379
John Zulauf3d84f1b2020-03-09 13:33:25 -06002380 const auto *context = cb_access_context->GetCurrentAccessContext();
2381 assert(context);
2382 if (!context) return skip;
2383
2384 const auto *src_image = Get<IMAGE_STATE>(srcImage);
2385 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002386 for (uint32_t region = 0; region < regionCount; region++) {
2387 const auto &copy_region = pRegions[region];
2388 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002389 auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.srcSubresource,
John Zulauf3d84f1b2020-03-09 13:33:25 -06002390 copy_region.srcOffset, copy_region.extent);
2391 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002392 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002393 "vkCmdCopyImage: Hazard %s for srcImage %s, region %" PRIu32 ". Prior access %s.",
2394 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
2395 string_UsageTag(hazard.tag).c_str());
John Zulauf5c5e88d2019-12-26 11:22:02 -07002396 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06002397 }
2398
2399 if (dst_image) {
locke-lunarg1df1f882020-03-02 16:42:08 -07002400 VkExtent3D dst_copy_extent =
2401 GetAdjustedDestImageExtent(src_image->createInfo.format, dst_image->createInfo.format, copy_region.extent);
John Zulauf540266b2020-04-06 18:54:53 -06002402 auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.dstSubresource,
locke-lunarg1df1f882020-03-02 16:42:08 -07002403 copy_region.dstOffset, dst_copy_extent);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002404 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002405 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002406 "vkCmdCopyImage: Hazard %s for dstImage %s, region %" PRIu32 ". Prior access %s.",
2407 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
2408 string_UsageTag(hazard.tag).c_str());
John Zulauf5c5e88d2019-12-26 11:22:02 -07002409 }
locke-lunarg1dbbb9e2020-02-28 22:43:53 -07002410 if (skip) break;
John Zulauf5c5e88d2019-12-26 11:22:02 -07002411 }
2412 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06002413
John Zulauf5c5e88d2019-12-26 11:22:02 -07002414 return skip;
2415}
2416
2417void SyncValidator::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2418 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2419 const VkImageCopy *pRegions) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002420 auto *cb_access_context = GetAccessContext(commandBuffer);
2421 assert(cb_access_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002422 const auto tag = cb_access_context->NextCommandTag(CMD_COPYIMAGE);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002423 auto *context = cb_access_context->GetCurrentAccessContext();
2424 assert(context);
2425
John Zulauf5c5e88d2019-12-26 11:22:02 -07002426 auto *src_image = Get<IMAGE_STATE>(srcImage);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002427 auto *dst_image = Get<IMAGE_STATE>(dstImage);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002428
2429 for (uint32_t region = 0; region < regionCount; region++) {
2430 const auto &copy_region = pRegions[region];
John Zulauf3d84f1b2020-03-09 13:33:25 -06002431 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002432 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.srcSubresource, copy_region.srcOffset,
2433 copy_region.extent, tag);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002434 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06002435 if (dst_image) {
locke-lunarg1df1f882020-03-02 16:42:08 -07002436 VkExtent3D dst_copy_extent =
2437 GetAdjustedDestImageExtent(src_image->createInfo.format, dst_image->createInfo.format, copy_region.extent);
John Zulauf540266b2020-04-06 18:54:53 -06002438 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.dstSubresource, copy_region.dstOffset,
2439 dst_copy_extent, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002440 }
2441 }
2442}
2443
2444bool SyncValidator::PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2445 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
2446 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2447 uint32_t bufferMemoryBarrierCount,
2448 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2449 uint32_t imageMemoryBarrierCount,
2450 const VkImageMemoryBarrier *pImageMemoryBarriers) const {
2451 bool skip = false;
John Zulauf3d84f1b2020-03-09 13:33:25 -06002452 const auto *cb_access_context = GetAccessContext(commandBuffer);
2453 assert(cb_access_context);
2454 if (!cb_access_context) return skip;
John Zulauf0cb5be22020-01-23 12:18:22 -07002455
John Zulauf3d84f1b2020-03-09 13:33:25 -06002456 const auto *context = cb_access_context->GetCurrentAccessContext();
2457 assert(context);
2458 if (!context) return skip;
John Zulauf0cb5be22020-01-23 12:18:22 -07002459
John Zulauf3d84f1b2020-03-09 13:33:25 -06002460 const auto src_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), srcStageMask);
John Zulauf36bcf6a2020-02-03 15:12:52 -07002461 const auto src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
2462 auto src_stage_accesses = AccessScopeByStage(src_stage_mask);
John Zulauf0cb5be22020-01-23 12:18:22 -07002463 // Validate Image Layout transitions
2464 for (uint32_t index = 0; index < imageMemoryBarrierCount; index++) {
2465 const auto &barrier = pImageMemoryBarriers[index];
2466 if (barrier.newLayout == barrier.oldLayout) continue; // Only interested in layout transitions at this point.
2467 const auto *image_state = Get<IMAGE_STATE>(barrier.image);
2468 if (!image_state) continue;
John Zulauf16adfc92020-04-08 10:28:33 -06002469 const auto hazard = context->DetectImageBarrierHazard(*image_state, src_exec_scope, src_stage_accesses, barrier);
John Zulauf0cb5be22020-01-23 12:18:22 -07002470 if (hazard.hazard) {
John Zulauf7635de32020-05-29 17:14:15 -06002471 // PHASE1 TODO -- add tag information to log msg when useful.
locke-lunarga0003652020-03-10 11:38:51 -06002472 skip |= LogError(barrier.image, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002473 "vkCmdPipelineBarrier: Hazard %s for image barrier %" PRIu32 " %s. Prior access %s.",
2474 string_SyncHazard(hazard.hazard), index, report_data->FormatHandle(barrier.image).c_str(),
2475 string_UsageTag(hazard.tag).c_str());
John Zulauf0cb5be22020-01-23 12:18:22 -07002476 }
2477 }
John Zulauf9cb530d2019-09-30 14:14:10 -06002478
2479 return skip;
2480}
2481
2482void SyncValidator::PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2483 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
2484 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2485 uint32_t bufferMemoryBarrierCount,
2486 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2487 uint32_t imageMemoryBarrierCount,
2488 const VkImageMemoryBarrier *pImageMemoryBarriers) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002489 auto *cb_access_context = GetAccessContext(commandBuffer);
2490 assert(cb_access_context);
2491 if (!cb_access_context) return;
John Zulauf2b151bf2020-04-24 15:37:44 -06002492 const auto tag = cb_access_context->NextCommandTag(CMD_PIPELINEBARRIER);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002493 auto access_context = cb_access_context->GetCurrentAccessContext();
2494 assert(access_context);
2495 if (!access_context) return;
John Zulauf9cb530d2019-09-30 14:14:10 -06002496
John Zulauf3d84f1b2020-03-09 13:33:25 -06002497 const auto src_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), srcStageMask);
John Zulauf36bcf6a2020-02-03 15:12:52 -07002498 auto src_stage_accesses = AccessScopeByStage(src_stage_mask);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002499 const auto dst_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), dstStageMask);
John Zulauf36bcf6a2020-02-03 15:12:52 -07002500 auto dst_stage_accesses = AccessScopeByStage(dst_stage_mask);
2501 const auto src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
2502 const auto dst_exec_scope = WithLaterPipelineStages(dst_stage_mask);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002503 ApplyBufferBarriers(access_context, src_exec_scope, src_stage_accesses, dst_exec_scope, dst_stage_accesses,
2504 bufferMemoryBarrierCount, pBufferMemoryBarriers);
John Zulauf540266b2020-04-06 18:54:53 -06002505 ApplyImageBarriers(access_context, src_exec_scope, src_stage_accesses, dst_exec_scope, dst_stage_accesses,
John Zulauf355e49b2020-04-24 15:11:15 -06002506 imageMemoryBarrierCount, pImageMemoryBarriers, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002507
 2508 // Apply these last in case the operation is a superset of the other two and would clean them up...
John Zulauf3d84f1b2020-03-09 13:33:25 -06002509 ApplyGlobalBarriers(access_context, src_exec_scope, dst_exec_scope, src_stage_accesses, dst_stage_accesses, memoryBarrierCount,
John Zulauf0cb5be22020-01-23 12:18:22 -07002510 pMemoryBarriers);
John Zulauf9cb530d2019-09-30 14:14:10 -06002511}
2512
2513void SyncValidator::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
2514 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
2515 // The state tracker sets up the device state
2516 StateTracker::PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);
2517
John Zulauf5f13a792020-03-10 07:31:21 -06002518 // Add the callback hooks for the functions that are either broadly or deeply used and that the ValidationStateTracker
2519 // refactor would be messier without.
John Zulauf9cb530d2019-09-30 14:14:10 -06002520 // TODO: Find a good way to do this hooklessly.
2521 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
2522 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, LayerObjectTypeSyncValidation);
2523 SyncValidator *sync_device_state = static_cast<SyncValidator *>(validation_data);
2524
John Zulaufd1f85d42020-04-15 12:23:15 -06002525 sync_device_state->SetCommandBufferResetCallback([sync_device_state](VkCommandBuffer command_buffer) -> void {
2526 sync_device_state->ResetCommandBufferCallback(command_buffer);
2527 });
2528 sync_device_state->SetCommandBufferFreeCallback([sync_device_state](VkCommandBuffer command_buffer) -> void {
2529 sync_device_state->FreeCommandBufferCallback(command_buffer);
2530 });
John Zulauf9cb530d2019-09-30 14:14:10 -06002531}
John Zulauf3d84f1b2020-03-09 13:33:25 -06002532
John Zulauf355e49b2020-04-24 15:11:15 -06002533bool SyncValidator::ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2534 const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const char *func_name) const {
2535 bool skip = false;
2536 const auto rp_state = Get<RENDER_PASS_STATE>(pRenderPassBegin->renderPass);
2537 auto cb_context = GetAccessContext(commandBuffer);
2538
2539 if (rp_state && cb_context) {
2540 skip |= cb_context->ValidateBeginRenderPass(*rp_state, pRenderPassBegin, pSubpassBeginInfo, func_name);
2541 }
2542
2543 return skip;
2544}
2545
2546bool SyncValidator::PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2547 VkSubpassContents contents) const {
2548 bool skip = StateTracker::PreCallValidateCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
2549 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
2550 subpass_begin_info.contents = contents;
2551 skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, &subpass_begin_info, "vkCmdBeginRenderPass");
2552 return skip;
2553}
2554
2555bool SyncValidator::PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2556 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const {
2557 bool skip = StateTracker::PreCallValidateCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
2558 skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, "vkCmdBeginRenderPass2");
2559 return skip;
2560}
2561
2562bool SyncValidator::PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
2563 const VkRenderPassBeginInfo *pRenderPassBegin,
2564 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const {
2565 bool skip = StateTracker::PreCallValidateCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
2566 skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, "vkCmdBeginRenderPass2KHR");
2567 return skip;
2568}
2569
John Zulauf3d84f1b2020-03-09 13:33:25 -06002570void SyncValidator::PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
2571 VkResult result) {
2572 // The state tracker sets up the command buffer state
2573 StateTracker::PostCallRecordBeginCommandBuffer(commandBuffer, pBeginInfo, result);
2574
 2575 // Create/initialize the structure that tracks accesses at the command buffer scope.
2576 auto cb_access_context = GetAccessContext(commandBuffer);
2577 assert(cb_access_context);
2578 cb_access_context->Reset();
2579}
2580
2581void SyncValidator::RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
John Zulauf355e49b2020-04-24 15:11:15 -06002582 const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE command) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002583 auto cb_context = GetAccessContext(commandBuffer);
John Zulauf355e49b2020-04-24 15:11:15 -06002584 if (cb_context) {
2585 cb_context->RecordBeginRenderPass(cb_context->NextCommandTag(command));
John Zulauf3d84f1b2020-03-09 13:33:25 -06002586 }
2587}
2588
2589void SyncValidator::PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2590 VkSubpassContents contents) {
2591 StateTracker::PostCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
2592 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
2593 subpass_begin_info.contents = contents;
John Zulauf355e49b2020-04-24 15:11:15 -06002594 RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, &subpass_begin_info, CMD_BEGINRENDERPASS);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002595}
2596
2597void SyncValidator::PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2598 const VkSubpassBeginInfo *pSubpassBeginInfo) {
2599 StateTracker::PostCallRecordCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06002600 RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, CMD_BEGINRENDERPASS2);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002601}
2602
2603void SyncValidator::PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
2604 const VkRenderPassBeginInfo *pRenderPassBegin,
2605 const VkSubpassBeginInfo *pSubpassBeginInfo) {
2606 StateTracker::PostCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06002607 RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, CMD_BEGINRENDERPASS2);
2608}
2609
2610bool SyncValidator::ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
2611 const VkSubpassEndInfoKHR *pSubpassEndInfo, const char *func_name) const {
2612 bool skip = false;
2613
2614 auto cb_context = GetAccessContext(commandBuffer);
2615 assert(cb_context);
2616 auto cb_state = cb_context->GetCommandBufferState();
2617 if (!cb_state) return skip;
2618
2619 auto rp_state = cb_state->activeRenderPass;
2620 if (!rp_state) return skip;
2621
2622 skip |= cb_context->ValidateNextSubpass(func_name);
2623
2624 return skip;
2625}
2626
2627bool SyncValidator::PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const {
2628 bool skip = StateTracker::PreCallValidateCmdNextSubpass(commandBuffer, contents);
2629 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
2630 subpass_begin_info.contents = contents;
2631 skip |= ValidateCmdNextSubpass(commandBuffer, &subpass_begin_info, nullptr, "vkCmdNextSubpass");
2632 return skip;
2633}
2634
2635bool SyncValidator::PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
2636 const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
2637 bool skip = StateTracker::PreCallValidateCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
2638 skip |= ValidateCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, "vkCmdNextSubpass2KHR");
2639 return skip;
2640}
2641
2642bool SyncValidator::PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
2643 const VkSubpassEndInfo *pSubpassEndInfo) const {
2644 bool skip = StateTracker::PreCallValidateCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
2645 skip |= ValidateCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, "vkCmdNextSubpass2");
2646 return skip;
John Zulauf3d84f1b2020-03-09 13:33:25 -06002647}
2648
2649void SyncValidator::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
John Zulauf355e49b2020-04-24 15:11:15 -06002650 const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002651 auto cb_context = GetAccessContext(commandBuffer);
2652 assert(cb_context);
2653 auto cb_state = cb_context->GetCommandBufferState();
2654 if (!cb_state) return;
2655
2656 auto rp_state = cb_state->activeRenderPass;
2657 if (!rp_state) return;
2658
John Zulauf355e49b2020-04-24 15:11:15 -06002659 cb_context->RecordNextSubpass(*rp_state, cb_context->NextCommandTag(command));
John Zulauf3d84f1b2020-03-09 13:33:25 -06002660}
2661
2662void SyncValidator::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
2663 StateTracker::PostCallRecordCmdNextSubpass(commandBuffer, contents);
2664 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
2665 subpass_begin_info.contents = contents;
John Zulauf355e49b2020-04-24 15:11:15 -06002666 RecordCmdNextSubpass(commandBuffer, &subpass_begin_info, nullptr, CMD_NEXTSUBPASS);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002667}
2668
2669void SyncValidator::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
2670 const VkSubpassEndInfo *pSubpassEndInfo) {
2671 StateTracker::PostCallRecordCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06002672 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, CMD_NEXTSUBPASS2);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002673}
2674
2675void SyncValidator::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
2676 const VkSubpassEndInfo *pSubpassEndInfo) {
2677 StateTracker::PostCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06002678 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, CMD_NEXTSUBPASS2);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002679}
2680
John Zulauf355e49b2020-04-24 15:11:15 -06002681bool SyncValidator::ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo,
2682 const char *func_name) const {
2683 bool skip = false;
2684
2685 auto cb_context = GetAccessContext(commandBuffer);
2686 assert(cb_context);
2687 auto cb_state = cb_context->GetCommandBufferState();
2688 if (!cb_state) return skip;
2689
2690 auto rp_state = cb_state->activeRenderPass;
2691 if (!rp_state) return skip;
2692
2693 skip |= cb_context->ValidateEndRenderpass(func_name);
2694 return skip;
2695}
2696
2697bool SyncValidator::PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const {
2698 bool skip = StateTracker::PreCallValidateCmdEndRenderPass(commandBuffer);
 2699 skip |= ValidateCmdEndRenderPass(commandBuffer, nullptr, "vkCmdEndRenderPass");
2700 return skip;
2701}
2702
2703bool SyncValidator::PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer,
2704 const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
2705 bool skip = StateTracker::PreCallValidateCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
 2706 skip |= ValidateCmdEndRenderPass(commandBuffer, pSubpassEndInfo, "vkCmdEndRenderPass2");
2707 return skip;
2708}
2709
2710bool SyncValidator::PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
2711 const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
2712 bool skip = StateTracker::PreCallValidateCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
 2713 skip |= ValidateCmdEndRenderPass(commandBuffer, pSubpassEndInfo, "vkCmdEndRenderPass2KHR");
2714 return skip;
2715}
2716
2717void SyncValidator::RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo,
2718 CMD_TYPE command) {
John Zulaufe5da6e52020-03-18 15:32:18 -06002719 // Resolve all of the subpass contexts to the command buffer context
2720 auto cb_context = GetAccessContext(commandBuffer);
2721 assert(cb_context);
2722 auto cb_state = cb_context->GetCommandBufferState();
2723 if (!cb_state) return;
2724
locke-lunargaecf2152020-05-12 17:15:41 -06002725 const auto *rp_state = cb_state->activeRenderPass.get();
John Zulaufe5da6e52020-03-18 15:32:18 -06002726 if (!rp_state) return;
2727
John Zulauf355e49b2020-04-24 15:11:15 -06002728 cb_context->RecordEndRenderPass(*rp_state, cb_context->NextCommandTag(command));
John Zulaufe5da6e52020-03-18 15:32:18 -06002729}
John Zulauf3d84f1b2020-03-09 13:33:25 -06002730
2731void SyncValidator::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
2732 StateTracker::PostCallRecordCmdEndRenderPass(commandBuffer);
John Zulauf355e49b2020-04-24 15:11:15 -06002733 RecordCmdEndRenderPass(commandBuffer, nullptr, CMD_ENDRENDERPASS);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002734}
2735
2736void SyncValidator::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) {
2737 StateTracker::PostCallRecordCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06002738 RecordCmdEndRenderPass(commandBuffer, pSubpassEndInfo, CMD_ENDRENDERPASS2);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002739}
2740
2741void SyncValidator::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) {
2742 StateTracker::PostCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06002743 RecordCmdEndRenderPass(commandBuffer, pSubpassEndInfo, CMD_ENDRENDERPASS2);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002744}
locke-lunarga19c71d2020-03-02 18:17:04 -07002745
2746bool SyncValidator::PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
2747 VkImageLayout dstImageLayout, uint32_t regionCount,
2748 const VkBufferImageCopy *pRegions) const {
2749 bool skip = false;
2750 const auto *cb_access_context = GetAccessContext(commandBuffer);
2751 assert(cb_access_context);
2752 if (!cb_access_context) return skip;
2753
2754 const auto *context = cb_access_context->GetCurrentAccessContext();
2755 assert(context);
2756 if (!context) return skip;
2757
2758 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
locke-lunarga19c71d2020-03-02 18:17:04 -07002759 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
2760
2761 for (uint32_t region = 0; region < regionCount; region++) {
2762 const auto &copy_region = pRegions[region];
John Zulauf16adfc92020-04-08 10:28:33 -06002763 if (src_buffer) {
John Zulauf355e49b2020-04-24 15:11:15 -06002764 ResourceAccessRange src_range =
2765 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, dst_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06002766 auto hazard = context->DetectHazard(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range);
locke-lunarga19c71d2020-03-02 18:17:04 -07002767 if (hazard.hazard) {
John Zulauf7635de32020-05-29 17:14:15 -06002768 // PHASE1 TODO -- add tag information to log msg when useful.
locke-lunarga0003652020-03-10 11:38:51 -06002769 skip |= LogError(srcBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002770 "vkCmdCopyBufferToImage: Hazard %s for srcBuffer %s, region %" PRIu32 ". Prior access %s.",
2771 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcBuffer).c_str(), region,
2772 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002773 }
2774 }
2775 if (dst_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002776 auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.imageSubresource,
locke-lunarga19c71d2020-03-02 18:17:04 -07002777 copy_region.imageOffset, copy_region.imageExtent);
2778 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002779 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002780 "vkCmdCopyBufferToImage: Hazard %s for dstImage %s, region %" PRIu32 ". Prior access %s.",
2781 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
2782 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002783 }
2784 if (skip) break;
2785 }
2786 if (skip) break;
2787 }
2788 return skip;
2789}
2790
2791void SyncValidator::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
2792 VkImageLayout dstImageLayout, uint32_t regionCount,
2793 const VkBufferImageCopy *pRegions) {
locke-lunarg8ec19162020-06-16 18:48:34 -06002794 StateTracker::PreCallRecordCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
locke-lunarga19c71d2020-03-02 18:17:04 -07002795 auto *cb_access_context = GetAccessContext(commandBuffer);
2796 assert(cb_access_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002797 const auto tag = cb_access_context->NextCommandTag(CMD_COPYBUFFERTOIMAGE);
locke-lunarga19c71d2020-03-02 18:17:04 -07002798 auto *context = cb_access_context->GetCurrentAccessContext();
2799 assert(context);
2800
2801 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
John Zulauf16adfc92020-04-08 10:28:33 -06002802 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07002803
2804 for (uint32_t region = 0; region < regionCount; region++) {
2805 const auto &copy_region = pRegions[region];
2806 if (src_buffer) {
John Zulauf355e49b2020-04-24 15:11:15 -06002807 ResourceAccessRange src_range =
2808 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, dst_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06002809 context->UpdateAccessState(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002810 }
2811 if (dst_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002812 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.imageSubresource,
John Zulauf5f13a792020-03-10 07:31:21 -06002813 copy_region.imageOffset, copy_region.imageExtent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002814 }
2815 }
2816}
2817
2818bool SyncValidator::PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
2819 VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount,
2820 const VkBufferImageCopy *pRegions) const {
2821 bool skip = false;
2822 const auto *cb_access_context = GetAccessContext(commandBuffer);
2823 assert(cb_access_context);
2824 if (!cb_access_context) return skip;
2825
2826 const auto *context = cb_access_context->GetCurrentAccessContext();
2827 assert(context);
2828 if (!context) return skip;
2829
2830 const auto *src_image = Get<IMAGE_STATE>(srcImage);
2831 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
2832 const auto dst_mem = (dst_buffer && !dst_buffer->sparse) ? dst_buffer->binding.mem_state->mem : VK_NULL_HANDLE;
2833 for (uint32_t region = 0; region < regionCount; region++) {
2834 const auto &copy_region = pRegions[region];
2835 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002836 auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.imageSubresource,
locke-lunarga19c71d2020-03-02 18:17:04 -07002837 copy_region.imageOffset, copy_region.imageExtent);
2838 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002839 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002840 "vkCmdCopyImageToBuffer: Hazard %s for srcImage %s, region %" PRIu32 ". Prior access %s.",
2841 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
2842 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002843 }
2844 }
2845 if (dst_mem) {
John Zulauf355e49b2020-04-24 15:11:15 -06002846 ResourceAccessRange dst_range =
2847 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, src_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06002848 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range);
locke-lunarga19c71d2020-03-02 18:17:04 -07002849 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002850 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002851 "vkCmdCopyImageToBuffer: Hazard %s for dstBuffer %s, region %" PRIu32 ". Prior access %s.",
2852 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str(), region,
2853 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002854 }
2855 }
2856 if (skip) break;
2857 }
2858 return skip;
2859}
2860
2861void SyncValidator::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2862 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
locke-lunarg8ec19162020-06-16 18:48:34 -06002863 StateTracker::PreCallRecordCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
locke-lunarga19c71d2020-03-02 18:17:04 -07002864 auto *cb_access_context = GetAccessContext(commandBuffer);
2865 assert(cb_access_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002866 const auto tag = cb_access_context->NextCommandTag(CMD_COPYIMAGETOBUFFER);
locke-lunarga19c71d2020-03-02 18:17:04 -07002867 auto *context = cb_access_context->GetCurrentAccessContext();
2868 assert(context);
2869
2870 const auto *src_image = Get<IMAGE_STATE>(srcImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07002871 auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
2872 const auto dst_mem = (dst_buffer && !dst_buffer->sparse) ? dst_buffer->binding.mem_state->mem : VK_NULL_HANDLE;
John Zulauf5f13a792020-03-10 07:31:21 -06002873 const VulkanTypedHandle dst_handle(dst_mem, kVulkanObjectTypeDeviceMemory);
locke-lunarga19c71d2020-03-02 18:17:04 -07002874
2875 for (uint32_t region = 0; region < regionCount; region++) {
2876 const auto &copy_region = pRegions[region];
2877 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002878 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.imageSubresource,
John Zulauf5f13a792020-03-10 07:31:21 -06002879 copy_region.imageOffset, copy_region.imageExtent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002880 }
2881 if (dst_buffer) {
John Zulauf355e49b2020-04-24 15:11:15 -06002882 ResourceAccessRange dst_range =
2883 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, src_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06002884 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002885 }
2886 }
2887}
2888
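// VkImageBlit offsets are corner pairs and may be given in either order (a reversed pair expresses a flip), so the
// blit validation below normalizes each pair to a min corner plus an absolute extent before hazard detection.
// Illustrative values: srcOffsets[0] = {64, 64, 1} and srcOffsets[1] = {0, 0, 0} yield offset = {0, 0, 0} and
// extent = {64, 64, 1}.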
2889bool SyncValidator::PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2890 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2891 const VkImageBlit *pRegions, VkFilter filter) const {
2892 bool skip = false;
2893 const auto *cb_access_context = GetAccessContext(commandBuffer);
2894 assert(cb_access_context);
2895 if (!cb_access_context) return skip;
2896
2897 const auto *context = cb_access_context->GetCurrentAccessContext();
2898 assert(context);
2899 if (!context) return skip;
2900
2901 const auto *src_image = Get<IMAGE_STATE>(srcImage);
2902 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
2903
2904 for (uint32_t region = 0; region < regionCount; region++) {
2905 const auto &blit_region = pRegions[region];
2906 if (src_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06002907 VkOffset3D offset = {std::min(blit_region.srcOffsets[0].x, blit_region.srcOffsets[1].x),
2908 std::min(blit_region.srcOffsets[0].y, blit_region.srcOffsets[1].y),
2909 std::min(blit_region.srcOffsets[0].z, blit_region.srcOffsets[1].z)};
2910 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.srcOffsets[1].x - blit_region.srcOffsets[0].x)),
2911 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].y - blit_region.srcOffsets[0].y)),
2912 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].z - blit_region.srcOffsets[0].z))};
2913 auto hazard =
2914 context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, blit_region.srcSubresource, offset, extent);
locke-lunarga19c71d2020-03-02 18:17:04 -07002915 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002916 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002917 "vkCmdBlitImage: Hazard %s for srcImage %s, region %" PRIu32 ". Prior access %s.",
2918 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
2919 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002920 }
2921 }
2922
2923 if (dst_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06002924 VkOffset3D offset = {std::min(blit_region.dstOffsets[0].x, blit_region.dstOffsets[1].x),
2925 std::min(blit_region.dstOffsets[0].y, blit_region.dstOffsets[1].y),
2926 std::min(blit_region.dstOffsets[0].z, blit_region.dstOffsets[1].z)};
2927 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.dstOffsets[1].x - blit_region.dstOffsets[0].x)),
2928 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].y - blit_region.dstOffsets[0].y)),
2929 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].z - blit_region.dstOffsets[0].z))};
2930 auto hazard =
2931 context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, blit_region.dstSubresource, offset, extent);
locke-lunarga19c71d2020-03-02 18:17:04 -07002932 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002933 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06002934 "vkCmdBlitImage: Hazard %s for dstImage %s, region %" PRIu32 ". Prior access %s.",
2935 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
2936 string_UsageTag(hazard.tag).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07002937 }
2938 if (skip) break;
2939 }
2940 }
2941
2942 return skip;
2943}
2944
2945void SyncValidator::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2946 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2947 const VkImageBlit *pRegions, VkFilter filter) {
locke-lunarg8ec19162020-06-16 18:48:34 -06002948 StateTracker::PreCallRecordCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount,
2949 pRegions, filter);
locke-lunarga19c71d2020-03-02 18:17:04 -07002950 auto *cb_access_context = GetAccessContext(commandBuffer);
2951 assert(cb_access_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002952 const auto tag = cb_access_context->NextCommandTag(CMD_BLITIMAGE);
locke-lunarga19c71d2020-03-02 18:17:04 -07002953 auto *context = cb_access_context->GetCurrentAccessContext();
2954 assert(context);
2955
2956 auto *src_image = Get<IMAGE_STATE>(srcImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07002957 auto *dst_image = Get<IMAGE_STATE>(dstImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07002958
2959 for (uint32_t region = 0; region < regionCount; region++) {
2960 const auto &blit_region = pRegions[region];
2961 if (src_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06002962 VkOffset3D offset = {std::min(blit_region.srcOffsets[0].x, blit_region.srcOffsets[1].x),
2963 std::min(blit_region.srcOffsets[0].y, blit_region.srcOffsets[1].y),
2964 std::min(blit_region.srcOffsets[0].z, blit_region.srcOffsets[1].z)};
2965 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.srcOffsets[1].x - blit_region.srcOffsets[0].x)),
2966 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].y - blit_region.srcOffsets[0].y)),
2967 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].z - blit_region.srcOffsets[0].z))};
2968 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, blit_region.srcSubresource, offset, extent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002969 }
2970 if (dst_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06002971 VkOffset3D offset = {std::min(blit_region.dstOffsets[0].x, blit_region.dstOffsets[1].x),
2972 std::min(blit_region.dstOffsets[0].y, blit_region.dstOffsets[1].y),
2973 std::min(blit_region.dstOffsets[0].z, blit_region.dstOffsets[1].z)};
2974 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.dstOffsets[1].x - blit_region.dstOffsets[0].x)),
2975 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].y - blit_region.dstOffsets[0].y)),
2976 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].z - blit_region.dstOffsets[0].z))};
2977 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, blit_region.dstSubresource, offset, extent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07002978 }
2979 }
2980}
locke-lunarg36ba2592020-04-03 09:42:04 -06002981
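// ValidateIndirectBuffer below checks the bytes the GPU will read from an indirect buffer. When the structs are
// tightly packed (stride == struct_size) or there is a single draw, one contiguous range is checked; otherwise each
// stride-spaced struct gets its own range. Illustrative math, assuming sizeof(VkDrawIndirectCommand) == 16:
//   drawCount = 3, stride = 16  -> one 48-byte range starting at offset
//   drawCount = 3, stride = 32  -> three 16-byte ranges at offset, offset + 32, and offset + 64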
locke-lunarg61870c22020-06-09 14:51:50 -06002982bool SyncValidator::ValidateIndirectBuffer(const AccessContext &context, VkCommandBuffer commandBuffer,
2983 const VkDeviceSize struct_size, const VkBuffer buffer, const VkDeviceSize offset,
2984 const uint32_t drawCount, const uint32_t stride, const char *function) const {
locke-lunargff255f92020-05-13 18:53:52 -06002985 bool skip = false;
2986 if (drawCount == 0) return skip;
2987
2988 const auto *buf_state = Get<BUFFER_STATE>(buffer);
2989 VkDeviceSize size = struct_size;
2990 if (drawCount == 1 || stride == size) {
2991 if (drawCount > 1) size *= drawCount;
2992 ResourceAccessRange range = MakeRange(offset, size);
2993 auto hazard = context.DetectHazard(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range);
2994 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06002995 skip |= LogError(buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
2996 "%s: Hazard %s for indirect %s in %s. Prior access %s.", function, string_SyncHazard(hazard.hazard),
2997 report_data->FormatHandle(buffer).c_str(), report_data->FormatHandle(commandBuffer).c_str(),
2998 string_UsageTag(hazard.tag).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06002999 }
3000 } else {
3001 for (uint32_t i = 0; i < drawCount; ++i) {
3002 ResourceAccessRange range = MakeRange(offset + i * stride, size);
3003 auto hazard = context.DetectHazard(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range);
3004 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003005 skip |= LogError(buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
3006 "%s: Hazard %s for indirect %s in %s. Prior access %s.", function,
3007 string_SyncHazard(hazard.hazard), report_data->FormatHandle(buffer).c_str(),
3008 report_data->FormatHandle(commandBuffer).c_str(), string_UsageTag(hazard.tag).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06003009 break;
3010 }
3011 }
3012 }
3013 return skip;
3014}
3015
locke-lunarg61870c22020-06-09 14:51:50 -06003016void SyncValidator::RecordIndirectBuffer(AccessContext &context, const ResourceUsageTag &tag, const VkDeviceSize struct_size,
3017 const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount,
3018 uint32_t stride) {
locke-lunargff255f92020-05-13 18:53:52 -06003019 const auto *buf_state = Get<BUFFER_STATE>(buffer);
3020 VkDeviceSize size = struct_size;
3021 if (drawCount == 1 || stride == size) {
3022 if (drawCount > 1) size *= drawCount;
3023 ResourceAccessRange range = MakeRange(offset, size);
3024 context.UpdateAccessState(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range, tag);
3025 } else {
3026 for (uint32_t i = 0; i < drawCount; ++i) {
3027 ResourceAccessRange range = MakeRange(offset + i * stride, size);
3028 context.UpdateAccessState(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range, tag);
3029 }
3030 }
3031}
3032
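// The count buffer used by the *IndirectCount commands holds a single uint32_t draw count, so ValidateCountBuffer and
// RecordCountBuffer below model it as a 4-byte indirect-command read at the given offset.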
locke-lunarg61870c22020-06-09 14:51:50 -06003033bool SyncValidator::ValidateCountBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, VkBuffer buffer,
3034 VkDeviceSize offset, const char *function) const {
locke-lunargff255f92020-05-13 18:53:52 -06003035 bool skip = false;
3036
3037 const auto *count_buf_state = Get<BUFFER_STATE>(buffer);
3038 ResourceAccessRange range = MakeRange(offset, 4);
3039 auto hazard = context.DetectHazard(*count_buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range);
3040 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003041 skip |= LogError(count_buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
3042 "%s: Hazard %s for countBuffer %s in %s. Prior access %s.", function, string_SyncHazard(hazard.hazard),
3043 report_data->FormatHandle(buffer).c_str(), report_data->FormatHandle(commandBuffer).c_str(),
3044 string_UsageTag(hazard.tag).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06003045 }
3046 return skip;
3047}
3048
locke-lunarg61870c22020-06-09 14:51:50 -06003049void SyncValidator::RecordCountBuffer(AccessContext &context, const ResourceUsageTag &tag, VkBuffer buffer, VkDeviceSize offset) {
locke-lunargff255f92020-05-13 18:53:52 -06003050 const auto *count_buf_state = Get<BUFFER_STATE>(buffer);
3051 ResourceAccessRange range = MakeRange(offset, 4);
3052 context.UpdateAccessState(*count_buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range, tag);
3053}
3054
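// A compute dispatch only touches resources bound through descriptor sets, so vkCmdDispatch validation is limited to
// the descriptor accesses of the currently bound compute pipeline.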
locke-lunarg36ba2592020-04-03 09:42:04 -06003055bool SyncValidator::PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const {
locke-lunargff255f92020-05-13 18:53:52 -06003056 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003057 const auto *cb_access_context = GetAccessContext(commandBuffer);
3058 assert(cb_access_context);
3059 if (!cb_access_context) return skip;
3060
locke-lunarg61870c22020-06-09 14:51:50 -06003061 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch");
locke-lunargff255f92020-05-13 18:53:52 -06003062 return skip;
locke-lunarg36ba2592020-04-03 09:42:04 -06003063}
3064
3065void SyncValidator::PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003066 StateTracker::PreCallRecordCmdDispatch(commandBuffer, x, y, z);
locke-lunargff255f92020-05-13 18:53:52 -06003067 auto *cb_access_context = GetAccessContext(commandBuffer);
3068 assert(cb_access_context);
3069 const auto tag = cb_access_context->NextCommandTag(CMD_DISPATCH);
locke-lunargff255f92020-05-13 18:53:52 -06003070
locke-lunarg61870c22020-06-09 14:51:50 -06003071 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, tag);
locke-lunarg36ba2592020-04-03 09:42:04 -06003072}
locke-lunarge1a67022020-04-29 00:15:36 -06003073
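// vkCmdDispatchIndirect adds an indirect-buffer read on top of the descriptor checks. The buffer holds exactly one
// VkDispatchIndirectCommand (three uint32_t values), so it is validated as a single struct-sized read range.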
3074bool SyncValidator::PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const {
locke-lunargff255f92020-05-13 18:53:52 -06003075 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003076 const auto *cb_access_context = GetAccessContext(commandBuffer);
3077 assert(cb_access_context);
3078 if (!cb_access_context) return skip;
3079
3080 const auto *context = cb_access_context->GetCurrentAccessContext();
3081 assert(context);
3082 if (!context) return skip;
3083
locke-lunarg61870c22020-06-09 14:51:50 -06003084 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect");
3085 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDispatchIndirectCommand), buffer, offset, 1,
3086 sizeof(VkDispatchIndirectCommand), "vkCmdDispatchIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003087 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003088}
3089
3090void SyncValidator::PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003091 StateTracker::PreCallRecordCmdDispatchIndirect(commandBuffer, buffer, offset);
locke-lunargff255f92020-05-13 18:53:52 -06003092 auto *cb_access_context = GetAccessContext(commandBuffer);
3093 assert(cb_access_context);
3094 const auto tag = cb_access_context->NextCommandTag(CMD_DISPATCHINDIRECT);
3095 auto *context = cb_access_context->GetCurrentAccessContext();
3096 assert(context);
3097
locke-lunarg61870c22020-06-09 14:51:50 -06003098 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, tag);
3099 RecordIndirectBuffer(*context, tag, sizeof(VkDispatchIndirectCommand), buffer, offset, 1, sizeof(VkDispatchIndirectCommand));
locke-lunarge1a67022020-04-29 00:15:36 -06003100}
3101
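// Draw commands are validated against three kinds of accesses: descriptor-bound resources of the graphics pipeline,
// the bound vertex (and, for indexed draws, index) buffers over the requested range, and the attachments of the
// current subpass.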
3102bool SyncValidator::PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
3103 uint32_t firstVertex, uint32_t firstInstance) const {
locke-lunarga4d39ea2020-05-22 14:17:29 -06003104 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003105 const auto *cb_access_context = GetAccessContext(commandBuffer);
3106 assert(cb_access_context);
3107 if (!cb_access_context) return skip;
3108
locke-lunarg61870c22020-06-09 14:51:50 -06003109 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw");
3110 skip |= cb_access_context->ValidateDrawVertex(vertexCount, firstVertex, "vkCmdDraw");
3111 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDraw");
locke-lunarga4d39ea2020-05-22 14:17:29 -06003112 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003113}
3114
3115void SyncValidator::PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
3116 uint32_t firstVertex, uint32_t firstInstance) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003117 StateTracker::PreCallRecordCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
locke-lunargff255f92020-05-13 18:53:52 -06003118 auto *cb_access_context = GetAccessContext(commandBuffer);
3119 assert(cb_access_context);
3120 const auto tag = cb_access_context->NextCommandTag(CMD_DRAW);
locke-lunargff255f92020-05-13 18:53:52 -06003121
locke-lunarg61870c22020-06-09 14:51:50 -06003122 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3123 cb_access_context->RecordDrawVertex(vertexCount, firstVertex, tag);
3124 cb_access_context->RecordDrawSubpassAttachment(tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003125}
3126
3127bool SyncValidator::PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
3128 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const {
locke-lunarga4d39ea2020-05-22 14:17:29 -06003129 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003130 const auto *cb_access_context = GetAccessContext(commandBuffer);
3131 assert(cb_access_context);
3132 if (!cb_access_context) return skip;
3133
locke-lunarg61870c22020-06-09 14:51:50 -06003134 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed");
3135 skip |= cb_access_context->ValidateDrawVertexIndex(indexCount, firstIndex, "vkCmdDrawIndexed");
3136 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDrawIndexed");
locke-lunarga4d39ea2020-05-22 14:17:29 -06003137 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003138}
3139
3140void SyncValidator::PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
3141 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003142 StateTracker::PreCallRecordCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
locke-lunargff255f92020-05-13 18:53:52 -06003143 auto *cb_access_context = GetAccessContext(commandBuffer);
3144 assert(cb_access_context);
3145 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDEXED);
locke-lunargff255f92020-05-13 18:53:52 -06003146
locke-lunarg61870c22020-06-09 14:51:50 -06003147 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3148 cb_access_context->RecordDrawVertexIndex(indexCount, firstIndex, tag);
3149 cb_access_context->RecordDrawSubpassAttachment(tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003150}
3151
3152bool SyncValidator::PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3153 uint32_t drawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003154 bool skip = false;
3155 if (drawCount == 0) return skip;
3156
locke-lunargff255f92020-05-13 18:53:52 -06003157 const auto *cb_access_context = GetAccessContext(commandBuffer);
3158 assert(cb_access_context);
3159 if (!cb_access_context) return skip;
3160
3161 const auto *context = cb_access_context->GetCurrentAccessContext();
3162 assert(context);
3163 if (!context) return skip;
3164
locke-lunarg61870c22020-06-09 14:51:50 -06003165 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect");
3166 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDrawIndirect");
3167 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndirectCommand), buffer, offset, drawCount, stride,
3168 "vkCmdDrawIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003169
3170 // TODO: For now, we validate the whole vertex buffer. This may cause false positives.
3171 // The VkDrawIndirectCommand buffer contents could still change up to queue submission.
3172 // We will validate the vertex buffer at queue submit time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003173 skip |= cb_access_context->ValidateDrawVertex(UINT32_MAX, 0, "vkCmdDrawIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003174 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003175}
3176
3177void SyncValidator::PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3178 uint32_t drawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003179 StateTracker::PreCallRecordCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003180 if (drawCount == 0) return;
locke-lunargff255f92020-05-13 18:53:52 -06003181 auto *cb_access_context = GetAccessContext(commandBuffer);
3182 assert(cb_access_context);
3183 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDIRECT);
3184 auto *context = cb_access_context->GetCurrentAccessContext();
3185 assert(context);
3186
locke-lunarg61870c22020-06-09 14:51:50 -06003187 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3188 cb_access_context->RecordDrawSubpassAttachment(tag);
3189 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndirectCommand), buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003190
3191 // TODO: For now, we record the whole vertex buffer. This may cause false positives.
3192 // The VkDrawIndirectCommand buffer contents could still change up to queue submission.
3193 // We will record the vertex buffer at queue submit time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003194 cb_access_context->RecordDrawVertex(UINT32_MAX, 0, tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003195}
3196
3197bool SyncValidator::PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3198 uint32_t drawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003199 bool skip = false;
3200 if (drawCount == 0) return skip;
locke-lunargff255f92020-05-13 18:53:52 -06003201 const auto *cb_access_context = GetAccessContext(commandBuffer);
3202 assert(cb_access_context);
3203 if (!cb_access_context) return skip;
3204
3205 const auto *context = cb_access_context->GetCurrentAccessContext();
3206 assert(context);
3207 if (!context) return skip;
3208
locke-lunarg61870c22020-06-09 14:51:50 -06003209 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect");
3210 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDrawIndexedIndirect");
3211 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, drawCount, stride,
3212 "vkCmdDrawIndexedIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003213
3214 // TODO: For now, we validate the whole index and vertex buffers. This may cause false positives.
3215 // The VkDrawIndexedIndirectCommand buffer contents could still change up to queue submission.
3216 // We will validate the index and vertex buffers at queue submit time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003217 skip |= cb_access_context->ValidateDrawVertexIndex(UINT32_MAX, 0, "vkCmdDrawIndexedIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003218 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003219}
3220
3221void SyncValidator::PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3222 uint32_t drawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003223 StateTracker::PreCallRecordCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003224 auto *cb_access_context = GetAccessContext(commandBuffer);
3225 assert(cb_access_context);
3226 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDEXEDINDIRECT);
3227 auto *context = cb_access_context->GetCurrentAccessContext();
3228 assert(context);
3229
locke-lunarg61870c22020-06-09 14:51:50 -06003230 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3231 cb_access_context->RecordDrawSubpassAttachment(tag);
3232 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003233
3234 // TODO: For now, we record the whole index and vertex buffers. This may cause false positives.
3235 // The VkDrawIndexedIndirectCommand buffer contents could still change up to queue submission.
3236 // We will record the index and vertex buffers at queue submit time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003237 cb_access_context->RecordDrawVertexIndex(UINT32_MAX, 0, tag);
locke-lunargff255f92020-05-13 18:53:52 -06003238}
3239
3240bool SyncValidator::ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3241 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3242 uint32_t stride, const char *function) const {
3243 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003244 const auto *cb_access_context = GetAccessContext(commandBuffer);
3245 assert(cb_access_context);
3246 if (!cb_access_context) return skip;
3247
3248 const auto *context = cb_access_context->GetCurrentAccessContext();
3249 assert(context);
3250 if (!context) return skip;
3251
locke-lunarg61870c22020-06-09 14:51:50 -06003252 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, function);
3253 skip |= cb_access_context->ValidateDrawSubpassAttachment(function);
3254 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndirectCommand), buffer, offset, maxDrawCount, stride,
3255 function);
3256 skip |= ValidateCountBuffer(*context, commandBuffer, countBuffer, countBufferOffset, function);
locke-lunargff255f92020-05-13 18:53:52 -06003257
3258 // TODO: For now, we validate the whole vertex buffer. This may cause false positives.
3259 // The VkDrawIndirectCommand buffer contents could still change up to queue submission.
3260 // We will validate the vertex buffer at queue submit time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003261 skip |= cb_access_context->ValidateDrawVertex(UINT32_MAX, 0, function);
locke-lunargff255f92020-05-13 18:53:52 -06003262 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003263}
3264
3265bool SyncValidator::PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3266 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3267 uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003268 return ValidateCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3269 "vkCmdDrawIndirectCount");
locke-lunarge1a67022020-04-29 00:15:36 -06003270}
3271
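// For the *IndirectCount variants the real draw count lives in countBuffer and is not known at record time, so the
// record path below stamps the 4-byte count read plus only the first indirect struct (a drawCount of 1 is passed to
// RecordIndirectBuffer); this is an approximation that does not record reads of any later structs.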
3272void SyncValidator::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3273 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3274 uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003275 StateTracker::PreCallRecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount,
3276 stride);
locke-lunargff255f92020-05-13 18:53:52 -06003277 auto *cb_access_context = GetAccessContext(commandBuffer);
3278 assert(cb_access_context);
3279 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDIRECTCOUNT);
3280 auto *context = cb_access_context->GetCurrentAccessContext();
3281 assert(context);
3282
locke-lunarg61870c22020-06-09 14:51:50 -06003283 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3284 cb_access_context->RecordDrawSubpassAttachment(tag);
3285 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndirectCommand), buffer, offset, 1, stride);
3286 RecordCountBuffer(*context, tag, countBuffer, countBufferOffset);
locke-lunargff255f92020-05-13 18:53:52 -06003287
3288 // TODO: For now, we record the whole vertex buffer. This may cause false positives.
3289 // The VkDrawIndirectCommand buffer contents could still change up to queue submission.
3290 // We will record the vertex buffer at queue submit time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003291 cb_access_context->RecordDrawVertex(UINT32_MAX, 0, tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003292}
3293
3294bool SyncValidator::PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3295 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3296 uint32_t maxDrawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003297 return ValidateCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3298 "vkCmdDrawIndirectCountKHR");
locke-lunarge1a67022020-04-29 00:15:36 -06003299}
3300
3301void SyncValidator::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3302 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3303 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003304 StateTracker::PreCallRecordCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount,
3305 stride);
locke-lunargff255f92020-05-13 18:53:52 -06003306 PreCallRecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
locke-lunarge1a67022020-04-29 00:15:36 -06003307}
3308
3309bool SyncValidator::PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3310 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3311 uint32_t maxDrawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003312 return ValidateCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3313 "vkCmdDrawIndirectCountAMD");
locke-lunarge1a67022020-04-29 00:15:36 -06003314}
3315
3316void SyncValidator::PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3317 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3318 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003319 StateTracker::PreCallRecordCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount,
3320 stride);
locke-lunargff255f92020-05-13 18:53:52 -06003321 PreCallRecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
3322}
3323
3324bool SyncValidator::ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3325 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3326 uint32_t stride, const char *function) const {
3327 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003328 const auto *cb_access_context = GetAccessContext(commandBuffer);
3329 assert(cb_access_context);
3330 if (!cb_access_context) return skip;
3331
3332 const auto *context = cb_access_context->GetCurrentAccessContext();
3333 assert(context);
3334 if (!context) return skip;
3335
locke-lunarg61870c22020-06-09 14:51:50 -06003336 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, function);
3337 skip |= cb_access_context->ValidateDrawSubpassAttachment(function);
3338 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, maxDrawCount,
3339 stride, function);
3340 skip |= ValidateCountBuffer(*context, commandBuffer, countBuffer, countBufferOffset, function);
locke-lunargff255f92020-05-13 18:53:52 -06003341
3342 // TODO: For now, we validate the whole index and vertex buffers. This may cause false positives.
3343 // The VkDrawIndexedIndirectCommand buffer contents could still change up to queue submission.
3344 // We will validate the index and vertex buffers at queue submit time in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003345 skip |= cb_access_context->ValidateDrawVertexIndex(UINT32_MAX, 0, function);
locke-lunargff255f92020-05-13 18:53:52 -06003346 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003347}
3348
3349bool SyncValidator::PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3350 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3351 uint32_t maxDrawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003352 return ValidateCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3353 "vkCmdDrawIndexedIndirectCount");
locke-lunarge1a67022020-04-29 00:15:36 -06003354}
3355
3356void SyncValidator::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3357 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3358 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003359 StateTracker::PreCallRecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
3360 maxDrawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003361 auto *cb_access_context = GetAccessContext(commandBuffer);
3362 assert(cb_access_context);
3363 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDEXEDINDIRECTCOUNT);
3364 auto *context = cb_access_context->GetCurrentAccessContext();
3365 assert(context);
3366
locke-lunarg61870c22020-06-09 14:51:50 -06003367 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3368 cb_access_context->RecordDrawSubpassAttachment(tag);
3369 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, 1, stride);
3370 RecordCountBuffer(*context, tag, countBuffer, countBufferOffset);
locke-lunargff255f92020-05-13 18:53:52 -06003371
3372 // TODO: For now, we record the whole index and vertex buffers. This may cause false positives.
3373 // The VkDrawIndexedIndirectCommand buffer contents could still change up to queue submission.
locke-lunarg61870c22020-06-09 14:51:50 -06003374 // We will record the index and vertex buffers at queue submit time in the future.
3375 cb_access_context->RecordDrawVertexIndex(UINT32_MAX, 0, tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003376}
3377
3378bool SyncValidator::PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
3379 VkDeviceSize offset, VkBuffer countBuffer,
3380 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3381 uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003382 return ValidateCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3383 "vkCmdDrawIndexedIndirectCountKHR");
locke-lunarge1a67022020-04-29 00:15:36 -06003384}
3385
3386void SyncValidator::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3387 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3388 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003389 StateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
3390 maxDrawCount, stride);
locke-lunarge1a67022020-04-29 00:15:36 -06003391 PreCallRecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
3392}
3393
3394bool SyncValidator::PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer,
3395 VkDeviceSize offset, VkBuffer countBuffer,
3396 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3397 uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003398 return ValidateCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3399 "vkCmdDrawIndexedIndirectCountAMD");
locke-lunarge1a67022020-04-29 00:15:36 -06003400}
3401
3402void SyncValidator::PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3403 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3404 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003405 StateTracker::PreCallRecordCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
3406 maxDrawCount, stride);
locke-lunarge1a67022020-04-29 00:15:36 -06003407 PreCallRecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
3408}
3409
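// Clears outside a render pass are transfer operations, so both clear commands below are modeled as TRANSFER_WRITE
// accesses covering the full image extent for each requested subresource range.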
3410bool SyncValidator::PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
3411 const VkClearColorValue *pColor, uint32_t rangeCount,
3412 const VkImageSubresourceRange *pRanges) const {
3413 bool skip = false;
3414 const auto *cb_access_context = GetAccessContext(commandBuffer);
3415 assert(cb_access_context);
3416 if (!cb_access_context) return skip;
3417
3418 const auto *context = cb_access_context->GetCurrentAccessContext();
3419 assert(context);
3420 if (!context) return skip;
3421
3422 const auto *image_state = Get<IMAGE_STATE>(image);
3423
3424 for (uint32_t index = 0; index < rangeCount; index++) {
3425 const auto &range = pRanges[index];
3426 if (image_state) {
3427 auto hazard =
3428 context->DetectHazard(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent);
3429 if (hazard.hazard) {
3430 skip |= LogError(image, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06003431 "vkCmdClearColorImage: Hazard %s for %s, range index %" PRIu32 ". Prior access %s.",
3432 string_SyncHazard(hazard.hazard), report_data->FormatHandle(image).c_str(), index,
3433 string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003434 }
3435 }
3436 }
3437 return skip;
3438}
3439
3440void SyncValidator::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
3441 const VkClearColorValue *pColor, uint32_t rangeCount,
3442 const VkImageSubresourceRange *pRanges) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003443 StateTracker::PreCallRecordCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
locke-lunarge1a67022020-04-29 00:15:36 -06003444 auto *cb_access_context = GetAccessContext(commandBuffer);
3445 assert(cb_access_context);
3446 const auto tag = cb_access_context->NextCommandTag(CMD_CLEARCOLORIMAGE);
3447 auto *context = cb_access_context->GetCurrentAccessContext();
3448 assert(context);
3449
3450 const auto *image_state = Get<IMAGE_STATE>(image);
3451
3452 for (uint32_t index = 0; index < rangeCount; index++) {
3453 const auto &range = pRanges[index];
3454 if (image_state) {
3455 context->UpdateAccessState(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent,
3456 tag);
3457 }
3458 }
3459}
3460
3461bool SyncValidator::PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
3462 VkImageLayout imageLayout,
3463 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
3464 const VkImageSubresourceRange *pRanges) const {
3465 bool skip = false;
3466 const auto *cb_access_context = GetAccessContext(commandBuffer);
3467 assert(cb_access_context);
3468 if (!cb_access_context) return skip;
3469
3470 const auto *context = cb_access_context->GetCurrentAccessContext();
3471 assert(context);
3472 if (!context) return skip;
3473
3474 const auto *image_state = Get<IMAGE_STATE>(image);
3475
3476 for (uint32_t index = 0; index < rangeCount; index++) {
3477 const auto &range = pRanges[index];
3478 if (image_state) {
3479 auto hazard =
3480 context->DetectHazard(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent);
3481 if (hazard.hazard) {
3482 skip |= LogError(image, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06003483 "vkCmdClearDepthStencilImage: Hazard %s for %s, range index %" PRIu32 ". Prior access %s.",
3484 string_SyncHazard(hazard.hazard), report_data->FormatHandle(image).c_str(), index,
3485 string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003486 }
3487 }
3488 }
3489 return skip;
3490}
3491
3492void SyncValidator::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
3493 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
3494 const VkImageSubresourceRange *pRanges) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003495 StateTracker::PreCallRecordCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
locke-lunarge1a67022020-04-29 00:15:36 -06003496 auto *cb_access_context = GetAccessContext(commandBuffer);
3497 assert(cb_access_context);
3498 const auto tag = cb_access_context->NextCommandTag(CMD_CLEARDEPTHSTENCILIMAGE);
3499 auto *context = cb_access_context->GetCurrentAccessContext();
3500 assert(context);
3501
3502 const auto *image_state = Get<IMAGE_STATE>(image);
3503
3504 for (uint32_t index = 0; index < rangeCount; index++) {
3505 const auto &range = pRanges[index];
3506 if (image_state) {
3507 context->UpdateAccessState(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent,
3508 tag);
3509 }
3510 }
3511}
3512
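// The destination range for vkCmdCopyQueryPoolResults is modeled below as stride * queryCount bytes from dstOffset,
// which is a conservative cover of the bytes actually written whenever stride is at least the size of one result
// element (the last result ends at (queryCount - 1) * stride plus the element size).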
3513bool SyncValidator::PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3514 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3515 VkDeviceSize dstOffset, VkDeviceSize stride,
3516 VkQueryResultFlags flags) const {
3517 bool skip = false;
3518 const auto *cb_access_context = GetAccessContext(commandBuffer);
3519 assert(cb_access_context);
3520 if (!cb_access_context) return skip;
3521
3522 const auto *context = cb_access_context->GetCurrentAccessContext();
3523 assert(context);
3524 if (!context) return skip;
3525
3526 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3527
3528 if (dst_buffer) {
locke-lunargff255f92020-05-13 18:53:52 -06003529 ResourceAccessRange range = MakeRange(dstOffset, stride * queryCount);
locke-lunarge1a67022020-04-29 00:15:36 -06003530 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
3531 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003532 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
3533 "vkCmdCopyQueryPoolResults: Hazard %s for dstBuffer %s. Prior access %s.",
3534 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str(),
3535 string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003536 }
3537 }
locke-lunargff255f92020-05-13 18:53:52 -06003538
3539 // TODO: Track VkQueryPool
locke-lunarge1a67022020-04-29 00:15:36 -06003540 return skip;
3541}
3542
3543void SyncValidator::PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
3544 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
3545 VkDeviceSize stride, VkQueryResultFlags flags) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003546 StateTracker::PreCallRecordCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset,
3547 stride, flags);
locke-lunarge1a67022020-04-29 00:15:36 -06003548 auto *cb_access_context = GetAccessContext(commandBuffer);
3549 assert(cb_access_context);
locke-lunargff255f92020-05-13 18:53:52 -06003550 const auto tag = cb_access_context->NextCommandTag(CMD_COPYQUERYPOOLRESULTS);
locke-lunarge1a67022020-04-29 00:15:36 -06003551 auto *context = cb_access_context->GetCurrentAccessContext();
3552 assert(context);
3553
3554 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3555
3556 if (dst_buffer) {
locke-lunargff255f92020-05-13 18:53:52 -06003557 ResourceAccessRange range = MakeRange(dstOffset, stride * queryCount);
locke-lunarge1a67022020-04-29 00:15:36 -06003558 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
3559 }
locke-lunargff255f92020-05-13 18:53:52 -06003560
3561 // TODO: Track VkQueryPool
locke-lunarge1a67022020-04-29 00:15:36 -06003562}
3563
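// vkCmdFillBuffer is a plain transfer write over [dstOffset, dstOffset + size). As an illustration, recording two
// fills over overlapping ranges with no intervening barrier would be flagged by the check below as a
// WRITE_AFTER_WRITE hazard.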
3564bool SyncValidator::PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
3565 VkDeviceSize size, uint32_t data) const {
3566 bool skip = false;
3567 const auto *cb_access_context = GetAccessContext(commandBuffer);
3568 assert(cb_access_context);
3569 if (!cb_access_context) return skip;
3570
3571 const auto *context = cb_access_context->GetCurrentAccessContext();
3572 assert(context);
3573 if (!context) return skip;
3574
3575 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3576
3577 if (dst_buffer) {
3578 ResourceAccessRange range = MakeRange(dstOffset, size);
3579 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
3580 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003581 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
3582 "vkCmdFillBuffer: Hazard %s for dstBuffer %s. Prior access %s.", string_SyncHazard(hazard.hazard),
3583 report_data->FormatHandle(dstBuffer).c_str(), string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003584 }
3585 }
3586 return skip;
3587}
3588
3589void SyncValidator::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
3590 VkDeviceSize size, uint32_t data) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003591 StateTracker::PreCallRecordCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
locke-lunarge1a67022020-04-29 00:15:36 -06003592 auto *cb_access_context = GetAccessContext(commandBuffer);
3593 assert(cb_access_context);
3594 const auto tag = cb_access_context->NextCommandTag(CMD_FILLBUFFER);
3595 auto *context = cb_access_context->GetCurrentAccessContext();
3596 assert(context);
3597
3598 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3599
3600 if (dst_buffer) {
3601 ResourceAccessRange range = MakeRange(dstOffset, size);
3602 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
3603 }
3604}
3605
3606bool SyncValidator::PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3607 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
3608 const VkImageResolve *pRegions) const {
3609 bool skip = false;
3610 const auto *cb_access_context = GetAccessContext(commandBuffer);
3611 assert(cb_access_context);
3612 if (!cb_access_context) return skip;
3613
3614 const auto *context = cb_access_context->GetCurrentAccessContext();
3615 assert(context);
3616 if (!context) return skip;
3617
3618 const auto *src_image = Get<IMAGE_STATE>(srcImage);
3619 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
3620
3621 for (uint32_t region = 0; region < regionCount; region++) {
3622 const auto &resolve_region = pRegions[region];
3623 if (src_image) {
3624 auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, resolve_region.srcSubresource,
3625 resolve_region.srcOffset, resolve_region.extent);
3626 if (hazard.hazard) {
3627 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06003628 "vkCmdResolveImage: Hazard %s for srcImage %s, region %" PRIu32 ". Prior access %s.",
3629 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
3630 string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003631 }
3632 }
3633
3634 if (dst_image) {
3635 auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, resolve_region.dstSubresource,
3636 resolve_region.dstOffset, resolve_region.extent);
3637 if (hazard.hazard) {
3638 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06003639 "vkCmdResolveImage: Hazard %s for dstImage %s, region %" PRIu32 ". Prior access %s.",
3640 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
3641 string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003642 }
3643 if (skip) break;
3644 }
3645 }
3646
3647 return skip;
3648}
3649
3650void SyncValidator::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3651 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
3652 const VkImageResolve *pRegions) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003653 StateTracker::PreCallRecordCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount,
3654 pRegions);
locke-lunarge1a67022020-04-29 00:15:36 -06003655 auto *cb_access_context = GetAccessContext(commandBuffer);
3656 assert(cb_access_context);
3657 const auto tag = cb_access_context->NextCommandTag(CMD_RESOLVEIMAGE);
3658 auto *context = cb_access_context->GetCurrentAccessContext();
3659 assert(context);
3660
3661 auto *src_image = Get<IMAGE_STATE>(srcImage);
3662 auto *dst_image = Get<IMAGE_STATE>(dstImage);
3663
3664 for (uint32_t region = 0; region < regionCount; region++) {
3665 const auto &resolve_region = pRegions[region];
3666 if (src_image) {
3667 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, resolve_region.srcSubresource,
3668 resolve_region.srcOffset, resolve_region.extent, tag);
3669 }
3670 if (dst_image) {
3671 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, resolve_region.dstSubresource,
3672 resolve_region.dstOffset, resolve_region.extent, tag);
3673 }
3674 }
3675}
3676
3677bool SyncValidator::PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
3678 VkDeviceSize dataSize, const void *pData) const {
3679 bool skip = false;
3680 const auto *cb_access_context = GetAccessContext(commandBuffer);
3681 assert(cb_access_context);
3682 if (!cb_access_context) return skip;
3683
3684 const auto *context = cb_access_context->GetCurrentAccessContext();
3685 assert(context);
3686 if (!context) return skip;
3687
3688 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3689
3690 if (dst_buffer) {
3691 ResourceAccessRange range = MakeRange(dstOffset, dataSize);
3692 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
3693 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003694 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
3695 "vkCmdUpdateBuffer: Hazard %s for dstBuffer %s. Prior access %s.", string_SyncHazard(hazard.hazard),
3696 report_data->FormatHandle(dstBuffer).c_str(), string_UsageTag(hazard.tag).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06003697 }
3698 }
3699 return skip;
3700}
3701
3702void SyncValidator::PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
3703 VkDeviceSize dataSize, const void *pData) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003704 StateTracker::PreCallRecordCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
locke-lunarge1a67022020-04-29 00:15:36 -06003705 auto *cb_access_context = GetAccessContext(commandBuffer);
3706 assert(cb_access_context);
3707 const auto tag = cb_access_context->NextCommandTag(CMD_UPDATEBUFFER);
3708 auto *context = cb_access_context->GetCurrentAccessContext();
3709 assert(context);
3710
3711 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3712
3713 if (dst_buffer) {
3714 ResourceAccessRange range = MakeRange(dstOffset, dataSize);
3715 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
3716 }
3717}
locke-lunargff255f92020-05-13 18:53:52 -06003718
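// vkCmdWriteBufferMarkerAMD writes a single 32-bit marker value, so it is tracked below as a 4-byte transfer write
// at dstOffset.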
3719bool SyncValidator::PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3720 VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const {
3721 bool skip = false;
3722 const auto *cb_access_context = GetAccessContext(commandBuffer);
3723 assert(cb_access_context);
3724 if (!cb_access_context) return skip;
3725
3726 const auto *context = cb_access_context->GetCurrentAccessContext();
3727 assert(context);
3728 if (!context) return skip;
3729
3730 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3731
3732 if (dst_buffer) {
3733 ResourceAccessRange range = MakeRange(dstOffset, 4);
3734 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
3735 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003736 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
3737 "vkCmdWriteBufferMarkerAMD: Hazard %s for dstBuffer %s. Prior access %s.",
3738 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str(),
3739 string_UsageTag(hazard.tag).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06003740 }
3741 }
3742 return skip;
3743}
3744
3745void SyncValidator::PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3746 VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003747 StateTracker::PreCallRecordCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
locke-lunargff255f92020-05-13 18:53:52 -06003748 auto *cb_access_context = GetAccessContext(commandBuffer);
3749 assert(cb_access_context);
3750 const auto tag = cb_access_context->NextCommandTag(CMD_WRITEBUFFERMARKERAMD);
3751 auto *context = cb_access_context->GetCurrentAccessContext();
3752 assert(context);
3753
3754 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3755
3756 if (dst_buffer) {
3757 ResourceAccessRange range = MakeRange(dstOffset, 4);
3758 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
3759 }
3760}