/* Copyright (c) 2019-2020 The Khronos Group Inc.
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 */

#include <limits>
#include <vector>
#include <memory>
#include <bitset>
#include "synchronization_validation.h"

const static std::array<AccessAddressType, static_cast<size_t>(AccessAddressType::kTypeCount)> kAddressTypes = {
    AccessAddressType::kLinear, AccessAddressType::kIdealized};

static const char *string_SyncHazardVUID(SyncHazard hazard) {
    switch (hazard) {
        case SyncHazard::NONE:
            return "SYNC-HAZARD-NONE";
            break;
        case SyncHazard::READ_AFTER_WRITE:
            return "SYNC-HAZARD-READ_AFTER_WRITE";
            break;
        case SyncHazard::WRITE_AFTER_READ:
            return "SYNC-HAZARD-WRITE_AFTER_READ";
            break;
        case SyncHazard::WRITE_AFTER_WRITE:
            return "SYNC-HAZARD-WRITE_AFTER_WRITE";
            break;
        case SyncHazard::READ_RACING_WRITE:
            return "SYNC-HAZARD-READ-RACING-WRITE";
            break;
        case SyncHazard::WRITE_RACING_WRITE:
            return "SYNC-HAZARD-WRITE-RACING-WRITE";
            break;
        case SyncHazard::WRITE_RACING_READ:
            return "SYNC-HAZARD-WRITE-RACING-READ";
            break;
        default:
            assert(0);
    }
    return "SYNC-HAZARD-INVALID";
}

static bool IsHazardVsRead(SyncHazard hazard) {
    switch (hazard) {
        case SyncHazard::NONE:
            return false;
            break;
        case SyncHazard::READ_AFTER_WRITE:
            return false;
            break;
        case SyncHazard::WRITE_AFTER_READ:
            return true;
            break;
        case SyncHazard::WRITE_AFTER_WRITE:
            return false;
            break;
        case SyncHazard::READ_RACING_WRITE:
            return false;
            break;
        case SyncHazard::WRITE_RACING_WRITE:
            return false;
            break;
        case SyncHazard::WRITE_RACING_READ:
            return true;
            break;
        default:
            assert(0);
    }
    return false;
}

static const char *string_SyncHazard(SyncHazard hazard) {
    switch (hazard) {
        case SyncHazard::NONE:
            return "NONE";
            break;
        case SyncHazard::READ_AFTER_WRITE:
            return "READ_AFTER_WRITE";
            break;
        case SyncHazard::WRITE_AFTER_READ:
            return "WRITE_AFTER_READ";
            break;
        case SyncHazard::WRITE_AFTER_WRITE:
            return "WRITE_AFTER_WRITE";
            break;
        case SyncHazard::READ_RACING_WRITE:
            return "READ_RACING_WRITE";
            break;
        case SyncHazard::WRITE_RACING_WRITE:
            return "WRITE_RACING_WRITE";
            break;
        case SyncHazard::WRITE_RACING_READ:
            return "WRITE_RACING_READ";
            break;
        default:
            assert(0);
    }
    return "INVALID HAZARD";
}

static const SyncStageAccessInfoType *SyncStageAccessInfoFromMask(SyncStageAccessFlags flags) {
    // Return the info for the first bit found
    const SyncStageAccessInfoType *info = nullptr;
    for (size_t i = 0; i < flags.size(); i++) {
        if (flags.test(i)) {
            info = &syncStageAccessInfoByStageAccessIndex[i];
            break;
        }
    }
    return info;
}

static std::string string_SyncStageAccessFlags(const SyncStageAccessFlags &flags, const char *sep = "|") {
    std::string out_str;
    if (flags.none()) {
        out_str = "0";
    } else {
        for (size_t i = 0; i < syncStageAccessInfoByStageAccessIndex.size(); i++) {
            const auto &info = syncStageAccessInfoByStageAccessIndex[i];
            if ((flags & info.stage_access_bit).any()) {
                if (!out_str.empty()) {
                    out_str.append(sep);
                }
                out_str.append(info.name);
            }
        }
        if (out_str.length() == 0) {
            out_str.append("Unhandled SyncStageAccess");
        }
    }
    return out_str;
}
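// Illustrative output only (assumes the generated SYNC_* names): a mask combining vertex- and
// fragment-shader reads would format as "SYNC_VERTEX_SHADER_SHADER_READ|SYNC_FRAGMENT_SHADER_SHADER_READ"
// with the default "|" separator, and an empty mask as "0".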

static std::string string_UsageTag(const ResourceUsageTag &tag) {
    std::stringstream out;

    out << "command: " << CommandTypeString(tag.command);
    out << ", seq_no: " << ((tag.index >> 1) & UINT32_MAX) << ", reset_no: " << (tag.index >> 33);
    if (tag.index & 1) {
        out << ", subcmd: " << (tag.index & 1);
    }
    return out.str();
}
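// Reading of the shifts above (a sketch, not normative): bit 0 of tag.index appears to carry the
// sub-command flag, bits 1..32 the per-command-buffer sequence number, and bits 33 and up the
// reset count. A hypothetical tag.index of 0x20000000A would report seq_no 5 and reset_no 1.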

static std::string string_UsageTag(const HazardResult &hazard) {
    const auto &tag = hazard.tag;
    assert(hazard.usage_index < static_cast<SyncStageAccessIndex>(syncStageAccessInfoByStageAccessIndex.size()));
    const auto &usage_info = syncStageAccessInfoByStageAccessIndex[hazard.usage_index];
    std::stringstream out;
    const auto *info = SyncStageAccessInfoFromMask(hazard.prior_access);
    const char *stage_access_name = info ? info->name : "INVALID_STAGE_ACCESS";
    out << "(usage: " << usage_info.name << ", prior_usage: " << stage_access_name;
    if (IsHazardVsRead(hazard.hazard)) {
        const auto barriers = hazard.access_state->GetReadBarriers(hazard.prior_access);
        out << ", read_barriers: " << string_VkPipelineStageFlags(barriers);
    } else {
        SyncStageAccessFlags write_barrier = hazard.access_state->GetWriteBarriers();
        out << ", write_barriers: " << string_SyncStageAccessFlags(write_barrier);
    }

    out << ", " << string_UsageTag(tag) << ")";
    return out.str();
}

// NOTE: the attachment read flag is put *only* in the access scope and not in the exec scope, since the ordering
// rules apply only to this specific access for this stage, and not the stage as a whole. The ordering detection
// also reflects this special case for read hazard detection (using access instead of exec scope)
static constexpr VkPipelineStageFlags kColorAttachmentExecScope = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
static const SyncStageAccessFlags kColorAttachmentAccessScope =
    SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ_BIT |
    SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT |
    SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE_BIT |
    SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ_BIT;  // Note: this is intentionally not in the exec scope
static constexpr VkPipelineStageFlags kDepthStencilAttachmentExecScope =
    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
static const SyncStageAccessFlags kDepthStencilAttachmentAccessScope =
    SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
    SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
    SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ_BIT;  // Note: this is intentionally not in the exec scope

static const SyncOrderingBarrier kColorAttachmentRasterOrder = {kColorAttachmentExecScope, kColorAttachmentAccessScope};
static const SyncOrderingBarrier kDepthStencilAttachmentRasterOrder = {kDepthStencilAttachmentExecScope,
                                                                       kDepthStencilAttachmentAccessScope};
static const SyncOrderingBarrier kAttachmentRasterOrder = {kDepthStencilAttachmentExecScope | kColorAttachmentExecScope,
                                                           kDepthStencilAttachmentAccessScope | kColorAttachmentAccessScope};
// Sometimes we have an internal access conflict, and we use the kCurrentCommandTag to set and detect in temporary/proxy contexts
static const ResourceUsageTag kCurrentCommandTag(ResourceUsageTag::kMaxIndex, CMD_NONE);

static VkDeviceSize ResourceBaseAddress(const BINDABLE &bindable) {
    return bindable.binding.offset + bindable.binding.mem_state->fake_base_address;
}

static bool SimpleBinding(const BINDABLE &bindable) { return !bindable.sparse && bindable.binding.mem_state; }

inline VkDeviceSize GetRealWholeSize(VkDeviceSize offset, VkDeviceSize size, VkDeviceSize whole_size) {
    if (size == VK_WHOLE_SIZE) {
        return (whole_size - offset);
    }
    return size;
}
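// For example: with a 256-byte buffer, GetRealWholeSize(16, VK_WHOLE_SIZE, 256) yields 240 (the
// remainder of the buffer past the offset); any explicit size is returned unchanged.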

static inline VkDeviceSize GetBufferWholeSize(const BUFFER_STATE &buf_state, VkDeviceSize offset, VkDeviceSize size) {
    return GetRealWholeSize(offset, size, buf_state.createInfo.size);
}

template <typename T>
static ResourceAccessRange MakeRange(const T &has_offset_and_size) {
    return ResourceAccessRange(has_offset_and_size.offset, (has_offset_and_size.offset + has_offset_and_size.size));
}

static ResourceAccessRange MakeRange(VkDeviceSize start, VkDeviceSize size) { return ResourceAccessRange(start, (start + size)); }

static inline ResourceAccessRange MakeRange(const BUFFER_STATE &buffer, VkDeviceSize offset, VkDeviceSize size) {
    return MakeRange(offset, GetBufferWholeSize(buffer, offset, size));
}

static inline ResourceAccessRange MakeRange(const BUFFER_VIEW_STATE &buf_view_state) {
    return MakeRange(*buf_view_state.buffer_state.get(), buf_view_state.create_info.offset, buf_view_state.create_info.range);
}

// Expand the pipeline stages without regard to whether they are valid w.r.t. queue or extension
VkPipelineStageFlags ExpandPipelineStages(VkQueueFlags queue_flags, VkPipelineStageFlags stage_mask) {
    VkPipelineStageFlags expanded = stage_mask;
    if (VK_PIPELINE_STAGE_ALL_COMMANDS_BIT & stage_mask) {
        expanded = expanded & ~VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
        for (const auto &all_commands : syncAllCommandStagesByQueueFlags) {
            if (all_commands.first & queue_flags) {
                expanded |= all_commands.second;
            }
        }
    }
    if (VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT & stage_mask) {
        expanded = expanded & ~VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
        expanded |= syncAllCommandStagesByQueueFlags.at(VK_QUEUE_GRAPHICS_BIT) & ~VK_PIPELINE_STAGE_HOST_BIT;
    }
    return expanded;
}
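// Illustrative expansion: on a graphics-capable queue, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT is
// replaced by the union of the individual graphics stages (vertex input, shader stages, fragment
// tests, color attachment output, ...) minus VK_PIPELINE_STAGE_HOST_BIT, so later scope math never
// has to special-case the ALL_* meta stages.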

VkPipelineStageFlags RelatedPipelineStages(VkPipelineStageFlags stage_mask,
                                           const std::map<VkPipelineStageFlagBits, VkPipelineStageFlags> &map) {
    VkPipelineStageFlags unscanned = stage_mask;
    VkPipelineStageFlags related = 0;
    for (const auto &entry : map) {
        const auto &stage = entry.first;
        if (stage & unscanned) {
            related = related | entry.second;
            unscanned = unscanned & ~stage;
            if (!unscanned) break;
        }
    }
    return related;
}

VkPipelineStageFlags WithEarlierPipelineStages(VkPipelineStageFlags stage_mask) {
    return stage_mask | RelatedPipelineStages(stage_mask, syncLogicallyEarlierStages);
}

VkPipelineStageFlags WithLaterPipelineStages(VkPipelineStageFlags stage_mask) {
    return stage_mask | RelatedPipelineStages(stage_mask, syncLogicallyLaterStages);
}
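// Sketch of intended use (assuming the syncLogicallyEarlier/LaterStages tables mirror the spec's
// pipeline ordering): WithEarlierPipelineStages(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT) would also set
// TOP_OF_PIPE, DRAW_INDIRECT and VERTEX_INPUT, matching how execution dependencies implicitly
// include logically earlier (or later) stages.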

static const ResourceAccessRange kFullRange(std::numeric_limits<VkDeviceSize>::min(), std::numeric_limits<VkDeviceSize>::max());

ResourceAccessRange GetBufferRange(VkDeviceSize offset, VkDeviceSize buf_whole_size, uint32_t first_index, uint32_t count,
                                   VkDeviceSize stride) {
    VkDeviceSize range_start = offset + first_index * stride;
    VkDeviceSize range_size = 0;
    if (count == UINT32_MAX) {
        range_size = buf_whole_size - range_start;
    } else {
        range_size = count * stride;
    }
    return MakeRange(range_start, range_size);
}
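// Worked example: GetBufferRange(offset = 0, buf_whole_size = 1024, first_index = 2, count = 3,
// stride = 16) covers the half-open byte range [32, 80); passing count == UINT32_MAX instead
// extends the range to the end of the buffer, i.e. [32, 1024).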

SyncStageAccessIndex GetSyncStageAccessIndexsByDescriptorSet(VkDescriptorType descriptor_type, const interface_var &descriptor_data,
                                                             VkShaderStageFlagBits stage_flag) {
    if (descriptor_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) {
        assert(stage_flag == VK_SHADER_STAGE_FRAGMENT_BIT);
        return SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ;
    }
    auto stage_access = syncStageAccessMaskByShaderStage.find(stage_flag);
    if (stage_access == syncStageAccessMaskByShaderStage.end()) {
        assert(0);
    }
    if (descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
        return stage_access->second.uniform_read;
    }

    // If the descriptorSet is writable, we don't need to care about SHADER_READ; SHADER_WRITE is enough.
    // Because if a write hazard happens, a read hazard might or might not happen.
    // But if a write hazard doesn't happen, a read hazard is impossible.
    if (descriptor_data.is_writable) {
        return stage_access->second.shader_write;
    }
    return stage_access->second.shader_read;
}
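// Illustrative mapping (names assume the generated per-stage sync access table): a writable storage
// buffer bound to the fragment stage resolves to that stage's shader_write index (e.g.
// SYNC_FRAGMENT_SHADER_SHADER_WRITE), a sampled image to shader_read, and a uniform buffer to
// uniform_read.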

bool IsImageLayoutDepthWritable(VkImageLayout image_layout) {
    return (image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
            image_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
            image_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL)
               ? true
               : false;
}

bool IsImageLayoutStencilWritable(VkImageLayout image_layout) {
    return (image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
            image_layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL ||
            image_layout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL)
               ? true
               : false;
}

// Class AccessContext stores the state of accesses specific to a Command, Subpass, or Queue
template <typename Action>
static void ApplyOverImageRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range_arg,
                                Action &action) {
    // At this point the "apply over range" logic only supports a single memory binding
    if (!SimpleBinding(image_state)) return;
    auto subresource_range = NormalizeSubresourceRange(image_state.createInfo, subresource_range_arg);
    subresource_adapter::ImageRangeGenerator range_gen(*image_state.fragment_encoder.get(), subresource_range, {0, 0, 0},
                                                       image_state.createInfo.extent);
    const auto base_address = ResourceBaseAddress(image_state);
    for (; range_gen->non_empty(); ++range_gen) {
        action((*range_gen + base_address));
    }
}

// Traverse the attachment resolves for a specific subpass, and apply action() to them.
// Used by both validation and record operations
//
// The signature for Action() reflects the needs of both uses.
template <typename Action>
void ResolveOperation(Action &action, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                      const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass) {
    VkExtent3D extent = CastTo3D(render_area.extent);
    VkOffset3D offset = CastTo3D(render_area.offset);
    const auto &rp_ci = rp_state.createInfo;
    const auto *attachment_ci = rp_ci.pAttachments;
    const auto &subpass_ci = rp_ci.pSubpasses[subpass];

    // Color resolves -- require an inuse color attachment and a matching inuse resolve attachment
    const auto *color_attachments = subpass_ci.pColorAttachments;
    const auto *color_resolve = subpass_ci.pResolveAttachments;
    if (color_resolve && color_attachments) {
        for (uint32_t i = 0; i < subpass_ci.colorAttachmentCount; i++) {
            const auto &color_attach = color_attachments[i].attachment;
            const auto &resolve_attach = subpass_ci.pResolveAttachments[i].attachment;
            if ((color_attach != VK_ATTACHMENT_UNUSED) && (resolve_attach != VK_ATTACHMENT_UNUSED)) {
                action("color", "resolve read", color_attach, resolve_attach, attachment_views[color_attach],
                       SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ, kColorAttachmentRasterOrder, offset, extent, 0);
                action("color", "resolve write", color_attach, resolve_attach, attachment_views[resolve_attach],
                       SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, kColorAttachmentRasterOrder, offset, extent, 0);
            }
        }
    }

    // Depth stencil resolve only if the extension is present
    const auto ds_resolve = lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolve>(subpass_ci.pNext);
    if (ds_resolve && ds_resolve->pDepthStencilResolveAttachment &&
        (ds_resolve->pDepthStencilResolveAttachment->attachment != VK_ATTACHMENT_UNUSED) && subpass_ci.pDepthStencilAttachment &&
        (subpass_ci.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)) {
        const auto src_at = subpass_ci.pDepthStencilAttachment->attachment;
        const auto src_ci = attachment_ci[src_at];
        // The formats are required to match so we can pick either
        const bool resolve_depth = (ds_resolve->depthResolveMode != VK_RESOLVE_MODE_NONE) && FormatHasDepth(src_ci.format);
        const bool resolve_stencil = (ds_resolve->stencilResolveMode != VK_RESOLVE_MODE_NONE) && FormatHasStencil(src_ci.format);
        const auto dst_at = ds_resolve->pDepthStencilResolveAttachment->attachment;
        VkImageAspectFlags aspect_mask = 0u;

        // Figure out which aspects are actually touched during resolve operations
        const char *aspect_string = nullptr;
        if (resolve_depth && resolve_stencil) {
            // Validate all aspects together
            aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
            aspect_string = "depth/stencil";
        } else if (resolve_depth) {
            // Validate depth only
            aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT;
            aspect_string = "depth";
        } else if (resolve_stencil) {
            // Validate stencil only
            aspect_mask = VK_IMAGE_ASPECT_STENCIL_BIT;
            aspect_string = "stencil";
        }

        if (aspect_mask) {
            action(aspect_string, "resolve read", src_at, dst_at, attachment_views[src_at],
                   SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ, kAttachmentRasterOrder, offset, extent,
                   aspect_mask);
            action(aspect_string, "resolve write", src_at, dst_at, attachment_views[dst_at],
                   SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, kAttachmentRasterOrder, offset, extent, aspect_mask);
        }
    }
}

// Action for validating resolve operations
class ValidateResolveAction {
  public:
    ValidateResolveAction(VkRenderPass render_pass, uint32_t subpass, const AccessContext &context, const SyncValidator &sync_state,
                          const char *func_name)
        : render_pass_(render_pass),
          subpass_(subpass),
          context_(context),
          sync_state_(sync_state),
          func_name_(func_name),
          skip_(false) {}
    void operator()(const char *aspect_name, const char *attachment_name, uint32_t src_at, uint32_t dst_at,
                    const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
                    const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask) {
        HazardResult hazard;
        hazard = context_.DetectHazard(view, current_usage, ordering, offset, extent, aspect_mask);
        if (hazard.hazard) {
            skip_ |= sync_state_.LogError(render_pass_, string_SyncHazardVUID(hazard.hazard),
                                          "%s: Hazard %s in subpass %" PRIu32 " during %s %s, from attachment %" PRIu32
                                          " to resolve attachment %" PRIu32 ". Access info %s.",
                                          func_name_, string_SyncHazard(hazard.hazard), subpass_, aspect_name, attachment_name,
                                          src_at, dst_at, string_UsageTag(hazard).c_str());
        }
    }
    // Providing a mechanism for the constructing caller to get the result of the validation
    bool GetSkip() const { return skip_; }

  private:
    VkRenderPass render_pass_;
    const uint32_t subpass_;
    const AccessContext &context_;
    const SyncValidator &sync_state_;
    const char *func_name_;
    bool skip_;
};

// Update action for resolve operations
class UpdateStateResolveAction {
  public:
    UpdateStateResolveAction(AccessContext &context, const ResourceUsageTag &tag) : context_(context), tag_(tag) {}
    void operator()(const char *aspect_name, const char *attachment_name, uint32_t src_at, uint32_t dst_at,
                    const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const SyncOrderingBarrier &ordering,
                    const VkOffset3D &offset, const VkExtent3D &extent, VkImageAspectFlags aspect_mask) {
        // Ignores validation only arguments...
        context_.UpdateAccessState(view, current_usage, offset, extent, aspect_mask, tag_);
    }

  private:
    AccessContext &context_;
    const ResourceUsageTag &tag_;
};

void HazardResult::Set(const ResourceAccessState *access_state_, SyncStageAccessIndex usage_index_, SyncHazard hazard_,
                       const SyncStageAccessFlags &prior_, const ResourceUsageTag &tag_) {
    access_state = std::unique_ptr<const ResourceAccessState>(new ResourceAccessState(*access_state_));
    usage_index = usage_index_;
    hazard = hazard_;
    prior_access = prior_;
    tag = tag_;
}

AccessContext::AccessContext(uint32_t subpass, VkQueueFlags queue_flags,
                             const std::vector<SubpassDependencyGraphNode> &dependencies,
                             const std::vector<AccessContext> &contexts, const AccessContext *external_context) {
    Reset();
    const auto &subpass_dep = dependencies[subpass];
    prev_.reserve(subpass_dep.prev.size());
    prev_by_subpass_.resize(subpass, nullptr);  // Can't be more prevs than the subpass we're on
    for (const auto &prev_dep : subpass_dep.prev) {
        const auto prev_pass = prev_dep.first->pass;
        const auto &prev_barriers = prev_dep.second;
        assert(prev_dep.second.size());
        prev_.emplace_back(&contexts[prev_pass], queue_flags, prev_barriers);
        prev_by_subpass_[prev_pass] = &prev_.back();
    }

    async_.reserve(subpass_dep.async.size());
    for (const auto async_subpass : subpass_dep.async) {
        async_.emplace_back(&contexts[async_subpass]);
    }
    if (subpass_dep.barrier_from_external.size()) {
        src_external_ = TrackBack(external_context, queue_flags, subpass_dep.barrier_from_external);
    }
    if (subpass_dep.barrier_to_external.size()) {
        dst_external_ = TrackBack(this, queue_flags, subpass_dep.barrier_to_external);
    }
}

template <typename Detector>
HazardResult AccessContext::DetectPreviousHazard(AccessAddressType type, const Detector &detector,
                                                 const ResourceAccessRange &range) const {
    ResourceAccessRangeMap descent_map;
    ResolvePreviousAccess(type, range, &descent_map, nullptr);

    HazardResult hazard;
    for (auto prev = descent_map.begin(); prev != descent_map.end() && !hazard.hazard; ++prev) {
        hazard = detector.Detect(prev);
    }
    return hazard;
}

// A recursive range walker for hazard detection, first for the current context and then (via DetectPreviousHazard) to walk
// the DAG of the contexts (for example subpasses)
template <typename Detector>
HazardResult AccessContext::DetectHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range,
                                         DetectOptions options) const {
    HazardResult hazard;

    if (static_cast<uint32_t>(options) & DetectOptions::kDetectAsync) {
        // Async checks don't require recursive lookups, as the async lists are exhaustive for the top-level context
        // so we'll check these first
        for (const auto &async_context : async_) {
            hazard = async_context->DetectAsyncHazard(type, detector, range);
            if (hazard.hazard) return hazard;
        }
    }

    const bool detect_prev = (static_cast<uint32_t>(options) & DetectOptions::kDetectPrevious) != 0;

    const auto &accesses = GetAccessStateMap(type);
    const auto from = accesses.lower_bound(range);
    const auto to = accesses.upper_bound(range);
    ResourceAccessRange gap = {range.begin, range.begin};

    for (auto pos = from; pos != to; ++pos) {
        // Cover any leading gap, or gap between entries
        if (detect_prev) {
            // TODO: After profiling we may want to change the descent logic such that we don't recur per gap...
            // Cover any leading gap, or gap between entries
            gap.end = pos->first.begin;  // We know this begin is < range.end
            if (gap.non_empty()) {
                // Recur on all gaps
                hazard = DetectPreviousHazard(type, detector, gap);
                if (hazard.hazard) return hazard;
            }
            // Set up for the next gap. If pos..end is >= range.end, loop will exit, and trailing gap will be empty
            gap.begin = pos->first.end;
        }

        hazard = detector.Detect(pos);
        if (hazard.hazard) return hazard;
    }

    if (detect_prev) {
        // Detect in the trailing empty as needed
        gap.end = range.end;
        if (gap.non_empty()) {
            hazard = DetectPreviousHazard(type, detector, gap);
        }
    }

    return hazard;
}

// A non recursive range walker for the asynchronous contexts (those we have no barriers with)
template <typename Detector>
HazardResult AccessContext::DetectAsyncHazard(AccessAddressType type, const Detector &detector,
                                              const ResourceAccessRange &range) const {
    auto &accesses = GetAccessStateMap(type);
    const auto from = accesses.lower_bound(range);
    const auto to = accesses.upper_bound(range);

    HazardResult hazard;
    for (auto pos = from; pos != to && !hazard.hazard; ++pos) {
        hazard = detector.DetectAsync(pos, start_tag_);
    }

    return hazard;
}

struct ApplySubpassTransitionBarriersAction {
    explicit ApplySubpassTransitionBarriersAction(const std::vector<SyncBarrier> &barriers_) : barriers(barriers_) {}
    void operator()(ResourceAccessState *access) const {
        assert(access);
        access->ApplyBarriers(barriers, true);
    }
    const std::vector<SyncBarrier> &barriers;
};

struct ApplyTrackbackBarriersAction {
    explicit ApplyTrackbackBarriersAction(const std::vector<SyncBarrier> &barriers_) : barriers(barriers_) {}
    void operator()(ResourceAccessState *access) const {
        assert(access);
        assert(!access->HasPendingState());
        access->ApplyBarriers(barriers, false);
        access->ApplyPendingBarriers(kCurrentCommandTag);
    }
    const std::vector<SyncBarrier> &barriers;
};

// Splits a single map entry into pieces matching the entries in [first, last); the total range over [first, last) must be
// contained within entry. Entry must be an iterator pointing to dest; first and last must be iterators pointing to a
// *different* map from dest.
// The walk ends at the position past the last resolved range -- the entry covering the remainder of entry->first not included
// in the range [first, last)
template <typename BarrierAction>
static void ResolveMapToEntry(ResourceAccessRangeMap *dest, ResourceAccessRangeMap::iterator entry,
                              ResourceAccessRangeMap::const_iterator first, ResourceAccessRangeMap::const_iterator last,
                              BarrierAction &barrier_action) {
    auto at = entry;
    for (auto pos = first; pos != last; ++pos) {
        // Every member of the input iterator range must fit within the remaining portion of entry
        assert(at->first.includes(pos->first));
        assert(at != dest->end());
        // Trim up at to the same size as the entry to resolve
        at = sparse_container::split(at, *dest, pos->first);
        auto access = pos->second;  // intentional copy
        barrier_action(&access);
        at->second.Resolve(access);
        ++at;  // Go to the remaining unused section of entry
    }
}
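// Worked example (hypothetical ranges): if *entry in dest covers [0, 64) and [first, last) holds
// source entries for [0, 16) and [16, 32), the dest entry is split at each source boundary, the
// barrier_action-transformed source state is Resolve()d into [0, 16) and then [16, 32), and the
// remainder [32, 64) is left as the trailing piece of the original entry.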

static SyncBarrier MergeBarriers(const std::vector<SyncBarrier> &barriers) {
    SyncBarrier merged = {};
    for (const auto &barrier : barriers) {
        merged.Merge(barrier);
    }
    return merged;
}

template <typename BarrierAction>
void AccessContext::ResolveAccessRange(AccessAddressType type, const ResourceAccessRange &range, BarrierAction &barrier_action,
                                       ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
                                       bool recur_to_infill) const {
    if (!range.non_empty()) return;

    ResourceRangeMergeIterator current(*resolve_map, GetAccessStateMap(type), range.begin);
    while (current->range.non_empty() && range.includes(current->range.begin)) {
        const auto current_range = current->range & range;
        if (current->pos_B->valid) {
            const auto &src_pos = current->pos_B->lower_bound;
            auto access = src_pos->second;  // intentional copy
            barrier_action(&access);

            if (current->pos_A->valid) {
                const auto trimmed = sparse_container::split(current->pos_A->lower_bound, *resolve_map, current_range);
                trimmed->second.Resolve(access);
                current.invalidate_A(trimmed);
            } else {
                auto inserted = resolve_map->insert(current->pos_A->lower_bound, std::make_pair(current_range, access));
                current.invalidate_A(inserted);  // Update the parallel iterator to point at the insert segment
            }
        } else {
            // we have to descend to fill this gap
            if (recur_to_infill) {
                if (current->pos_A->valid) {
                    // Dest is valid, so we need to accumulate along the DAG and then resolve... in an N-to-1 resolve operation
                    ResourceAccessRangeMap gap_map;
                    ResolvePreviousAccess(type, current_range, &gap_map, infill_state);
                    ResolveMapToEntry(resolve_map, current->pos_A->lower_bound, gap_map.begin(), gap_map.end(), barrier_action);
                } else {
                    // There isn't anything in dest in current_range, so we can accumulate directly into it.
                    ResolvePreviousAccess(type, current_range, resolve_map, infill_state);
                    // Need to apply the barrier to the accesses we accumulated, noting that we haven't updated current
                    for (auto pos = resolve_map->lower_bound(current_range); pos != current->pos_A->lower_bound; ++pos) {
                        barrier_action(&pos->second);
                    }
                }
                // Given that there could be gaps we need to seek carefully to not repeatedly search the same gaps in the next
                // iteration of the outer while.

                // Set the parallel iterator to the end of this range s.t. ++ will move us to the next range whether or
                // not the end of the range is a gap. For the seek to work, first we need to warn the parallel iterator
                // we stepped on the dest map
                const auto seek_to = current_range.end - 1;  // The subtraction is safe as range can't be empty (loop condition)
                current.invalidate_A();                      // Changes current->range
                current.seek(seek_to);
            } else if (!current->pos_A->valid && infill_state) {
                // If we didn't find anything in the current range, and we aren't recurring... we infill if required
                auto inserted = resolve_map->insert(current->pos_A->lower_bound, std::make_pair(current->range, *infill_state));
                current.invalidate_A(inserted);  // Update the parallel iterator to point at the correct segment after insert
            }
        }
        ++current;
    }

    // Infill if range goes past both the current and resolve map prior contents
    if (recur_to_infill && (current->range.end < range.end)) {
        ResourceAccessRange trailing_fill_range = {current->range.end, range.end};
        ResourceAccessRangeMap gap_map;
        const auto the_end = resolve_map->end();
        ResolvePreviousAccess(type, trailing_fill_range, &gap_map, infill_state);
        for (auto &access : gap_map) {
            barrier_action(&access.second);
            resolve_map->insert(the_end, access);
        }
    }
}

void AccessContext::ResolvePreviousAccess(AccessAddressType type, const ResourceAccessRange &range,
                                          ResourceAccessRangeMap *descent_map, const ResourceAccessState *infill_state) const {
    if ((prev_.size() == 0) && (src_external_.context == nullptr)) {
        if (range.non_empty() && infill_state) {
            descent_map->insert(std::make_pair(range, *infill_state));
        }
    } else {
        // Look for something to fill the gap further along.
        for (const auto &prev_dep : prev_) {
            const ApplyTrackbackBarriersAction barrier_action(prev_dep.barriers);
            prev_dep.context->ResolveAccessRange(type, range, barrier_action, descent_map, infill_state);
        }

        if (src_external_.context) {
            const ApplyTrackbackBarriersAction barrier_action(src_external_.barriers);
            src_external_.context->ResolveAccessRange(type, range, barrier_action, descent_map, infill_state);
        }
    }
}

AccessAddressType AccessContext::ImageAddressType(const IMAGE_STATE &image) {
    return (image.fragment_encoder->IsLinearImage()) ? AccessAddressType::kLinear : AccessAddressType::kIdealized;
}

static SyncStageAccessIndex ColorLoadUsage(VkAttachmentLoadOp load_op) {
    const auto stage_access = (load_op == VK_ATTACHMENT_LOAD_OP_LOAD) ? SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ
                                                                      : SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE;
    return stage_access;
}
static SyncStageAccessIndex DepthStencilLoadUsage(VkAttachmentLoadOp load_op) {
    const auto stage_access = (load_op == VK_ATTACHMENT_LOAD_OP_LOAD) ? SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ
                                                                      : SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE;
    return stage_access;
}

// Caller must manage returned pointer
static AccessContext *CreateStoreResolveProxyContext(const AccessContext &context, const RENDER_PASS_STATE &rp_state,
                                                     uint32_t subpass, const VkRect2D &render_area,
                                                     std::vector<const IMAGE_VIEW_STATE *> attachment_views) {
    auto *proxy = new AccessContext(context);
    proxy->UpdateAttachmentResolveAccess(rp_state, render_area, attachment_views, subpass, kCurrentCommandTag);
    proxy->UpdateAttachmentStoreAccess(rp_state, render_area, attachment_views, subpass, kCurrentCommandTag);
    return proxy;
}

template <typename BarrierAction>
class ResolveAccessRangeFunctor {
  public:
    ResolveAccessRangeFunctor(const AccessContext &context, AccessAddressType address_type, ResourceAccessRangeMap *descent_map,
                              const ResourceAccessState *infill_state, BarrierAction &barrier_action)
        : context_(context),
          address_type_(address_type),
          descent_map_(descent_map),
          infill_state_(infill_state),
          barrier_action_(barrier_action) {}
    ResolveAccessRangeFunctor() = delete;
    void operator()(const ResourceAccessRange &range) const {
        context_.ResolveAccessRange(address_type_, range, barrier_action_, descent_map_, infill_state_);
    }

  private:
    const AccessContext &context_;
    const AccessAddressType address_type_;
    ResourceAccessRangeMap *const descent_map_;
    const ResourceAccessState *infill_state_;
    BarrierAction &barrier_action_;
};

template <typename BarrierAction>
void AccessContext::ResolveAccessRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range,
                                       BarrierAction &barrier_action, AccessAddressType address_type,
                                       ResourceAccessRangeMap *descent_map, const ResourceAccessState *infill_state) const {
    const ResolveAccessRangeFunctor<BarrierAction> action(*this, address_type, descent_map, infill_state, barrier_action);
    ApplyOverImageRange(image_state, subresource_range, action);
}

// Layout transitions are handled as if they were occurring at the beginning of the next subpass
bool AccessContext::ValidateLayoutTransitions(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                              const VkRect2D &render_area, uint32_t subpass,
                                              const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                              const char *func_name) const {
    bool skip = false;
    // As validation methods are const and precede the record/update phase, for any transitions from the immediately
    // previous subpass, we have to validate them against a copy of the AccessContext, with resolve operations applied, as
    // those effects have not been recorded yet.
    //
    // Note: we could be more efficient by tracking whether or not we actually *have* any changes (e.g. attachment resolve)
    // to apply and only copy then, if this proves a hot spot.
    std::unique_ptr<AccessContext> proxy_for_prev;
    TrackBack proxy_track_back;

    const auto &transitions = rp_state.subpass_transitions[subpass];
    for (const auto &transition : transitions) {
        const bool prev_needs_proxy = transition.prev_pass != VK_SUBPASS_EXTERNAL && (transition.prev_pass + 1 == subpass);

        const auto *track_back = GetTrackBackFromSubpass(transition.prev_pass);
        if (prev_needs_proxy) {
            if (!proxy_for_prev) {
                proxy_for_prev.reset(CreateStoreResolveProxyContext(*track_back->context, rp_state, transition.prev_pass,
                                                                    render_area, attachment_views));
                proxy_track_back = *track_back;
                proxy_track_back.context = proxy_for_prev.get();
            }
            track_back = &proxy_track_back;
        }
        auto hazard = DetectSubpassTransitionHazard(*track_back, attachment_views[transition.attachment]);
        if (hazard.hazard) {
            skip |= sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
                                        "%s: Hazard %s in subpass %" PRIu32 " for attachment %" PRIu32
                                        " image layout transition (old_layout: %s, new_layout: %s). Access info %s.",
                                        func_name, string_SyncHazard(hazard.hazard), subpass, transition.attachment,
                                        string_VkImageLayout(transition.old_layout), string_VkImageLayout(transition.new_layout),
                                        string_UsageTag(hazard).c_str());
        }
    }
    return skip;
}

bool AccessContext::ValidateLoadOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                          const VkRect2D &render_area, uint32_t subpass,
                                          const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                          const char *func_name) const {
    bool skip = false;
    const auto *attachment_ci = rp_state.createInfo.pAttachments;
    VkExtent3D extent = CastTo3D(render_area.extent);
    VkOffset3D offset = CastTo3D(render_area.offset);

    for (uint32_t i = 0; i < rp_state.createInfo.attachmentCount; i++) {
        if (subpass == rp_state.attachment_first_subpass[i]) {
            if (attachment_views[i] == nullptr) continue;
            const IMAGE_VIEW_STATE &view = *attachment_views[i];
            const IMAGE_STATE *image = view.image_state.get();
            if (image == nullptr) continue;
            const auto &ci = attachment_ci[i];

            // Need to check in the following way
            // 1) if the usage bit isn't in the dest_access_scope, and there is a layout transition for initial use, report hazard
            //    vs. transition
            // 2) if there isn't a layout transition, we need to look at the external context with a "detect hazard" operation
            //    for each aspect loaded.

            const bool has_depth = FormatHasDepth(ci.format);
            const bool has_stencil = FormatHasStencil(ci.format);
            const bool is_color = !(has_depth || has_stencil);

            const SyncStageAccessIndex load_index = has_depth ? DepthStencilLoadUsage(ci.loadOp) : ColorLoadUsage(ci.loadOp);
            const SyncStageAccessIndex stencil_load_index = has_stencil ? DepthStencilLoadUsage(ci.stencilLoadOp) : load_index;

            HazardResult hazard;
            const char *aspect = nullptr;

            auto hazard_range = view.normalized_subresource_range;
            bool checked_stencil = false;
            if (is_color) {
                hazard = DetectHazard(*image, load_index, view.normalized_subresource_range, kColorAttachmentRasterOrder, offset,
                                      extent);
                aspect = "color";
            } else {
                if (has_depth) {
                    hazard_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
                    hazard = DetectHazard(*image, load_index, hazard_range, kDepthStencilAttachmentRasterOrder, offset, extent);
                    aspect = "depth";
                }
                if (!hazard.hazard && has_stencil) {
                    hazard_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
                    hazard =
                        DetectHazard(*image, stencil_load_index, hazard_range, kDepthStencilAttachmentRasterOrder, offset, extent);
                    aspect = "stencil";
                    checked_stencil = true;
                }
            }

            if (hazard.hazard) {
                auto load_op_string = string_VkAttachmentLoadOp(checked_stencil ? ci.stencilLoadOp : ci.loadOp);
                if (hazard.tag == kCurrentCommandTag) {
                    // Hazard vs. ILT
                    skip |= sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
                                                "%s: Hazard %s vs. layout transition in subpass %" PRIu32 " for attachment %" PRIu32
                                                " aspect %s during load with loadOp %s.",
                                                func_name, string_SyncHazard(hazard.hazard), subpass, i, aspect, load_op_string);
                } else {
                    skip |= sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
                                                "%s: Hazard %s in subpass %" PRIu32 " for attachment %" PRIu32
                                                " aspect %s during load with loadOp %s. Access info %s.",
                                                func_name, string_SyncHazard(hazard.hazard), subpass, i, aspect, load_op_string,
                                                string_UsageTag(hazard).c_str());
                }
            }
        }
    }
    return skip;
}

// Store operation validation can ignore resolve (before it) and layout transitions after it. The first is ignored
// because of the ordering guarantees w.r.t. sample access and that the resolve validation hasn't altered the state, because
// store is part of the same Next/End operation.
// The latter is handled in layout transition validation directly
bool AccessContext::ValidateStoreOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                           const VkRect2D &render_area, uint32_t subpass,
                                           const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
                                           const char *func_name) const {
    bool skip = false;
    const auto *attachment_ci = rp_state.createInfo.pAttachments;
    VkExtent3D extent = CastTo3D(render_area.extent);
    VkOffset3D offset = CastTo3D(render_area.offset);

    for (uint32_t i = 0; i < rp_state.createInfo.attachmentCount; i++) {
        if (subpass == rp_state.attachment_last_subpass[i]) {
            if (attachment_views[i] == nullptr) continue;
            const IMAGE_VIEW_STATE &view = *attachment_views[i];
            const IMAGE_STATE *image = view.image_state.get();
            if (image == nullptr) continue;
            const auto &ci = attachment_ci[i];

            // The spec states that "don't care" is an operation with VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
            // so we assume that an implementation is *free* to write in that case, meaning that for correctness
            // sake, we treat DONT_CARE as writing.
            const bool has_depth = FormatHasDepth(ci.format);
            const bool has_stencil = FormatHasStencil(ci.format);
            const bool is_color = !(has_depth || has_stencil);
            const bool store_op_stores = ci.storeOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
            if (!has_stencil && !store_op_stores) continue;

            HazardResult hazard;
            const char *aspect = nullptr;
            bool checked_stencil = false;
            if (is_color) {
                hazard = DetectHazard(*image, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE,
                                      view.normalized_subresource_range, kAttachmentRasterOrder, offset, extent);
                aspect = "color";
            } else {
                const bool stencil_op_stores = ci.stencilStoreOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
                auto hazard_range = view.normalized_subresource_range;
                if (has_depth && store_op_stores) {
                    hazard_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
                    hazard = DetectHazard(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, hazard_range,
                                          kAttachmentRasterOrder, offset, extent);
                    aspect = "depth";
                }
                if (!hazard.hazard && has_stencil && stencil_op_stores) {
                    hazard_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
                    hazard = DetectHazard(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, hazard_range,
                                          kAttachmentRasterOrder, offset, extent);
                    aspect = "stencil";
                    checked_stencil = true;
                }
            }

            if (hazard.hazard) {
                const char *const op_type_string = checked_stencil ? "stencilStoreOp" : "storeOp";
                const char *const store_op_string = string_VkAttachmentStoreOp(checked_stencil ? ci.stencilStoreOp : ci.storeOp);
                skip |= sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
                                            "%s: Hazard %s in subpass %" PRIu32 " for attachment %" PRIu32
                                            " %s aspect during store with %s %s. Access info %s",
                                            func_name, string_SyncHazard(hazard.hazard), subpass, i, aspect, op_type_string,
                                            store_op_string, string_UsageTag(hazard).c_str());
            }
        }
    }
    return skip;
}

bool AccessContext::ValidateResolveOperations(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                              const VkRect2D &render_area,
                                              const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
                                              uint32_t subpass) const {
    ValidateResolveAction validate_action(rp_state.renderPass, subpass, *this, sync_state, func_name);
    ResolveOperation(validate_action, rp_state, render_area, attachment_views, subpass);
    return validate_action.GetSkip();
}

class HazardDetector {
    SyncStageAccessIndex usage_index_;

  public:
    HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const { return pos->second.DetectHazard(usage_index_); }
    HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos, const ResourceUsageTag &start_tag) const {
        return pos->second.DetectAsyncHazard(usage_index_, start_tag);
    }
    explicit HazardDetector(SyncStageAccessIndex usage) : usage_index_(usage) {}
};

class HazardDetectorWithOrdering {
    const SyncStageAccessIndex usage_index_;
    const SyncOrderingBarrier &ordering_;

  public:
    HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const {
        return pos->second.DetectHazard(usage_index_, ordering_);
    }
    HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos, const ResourceUsageTag &start_tag) const {
        return pos->second.DetectAsyncHazard(usage_index_, start_tag);
    }
    HazardDetectorWithOrdering(SyncStageAccessIndex usage, const SyncOrderingBarrier &ordering)
        : usage_index_(usage), ordering_(ordering) {}
};

HazardResult AccessContext::DetectHazard(AccessAddressType type, SyncStageAccessIndex usage_index,
                                         const ResourceAccessRange &range) const {
    HazardDetector detector(usage_index);
    return DetectHazard(type, detector, range, DetectOptions::kDetectAll);
}

HazardResult AccessContext::DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index,
                                         const ResourceAccessRange &range) const {
    if (!SimpleBinding(buffer)) return HazardResult();
    return DetectHazard(AccessAddressType::kLinear, usage_index, range + ResourceBaseAddress(buffer));
}

John Zulauf69133422020-05-20 14:55:53 -06001012template <typename Detector>
1013HazardResult AccessContext::DetectHazard(Detector &detector, const IMAGE_STATE &image,
1014 const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
1015 const VkExtent3D &extent, DetectOptions options) const {
1016 if (!SimpleBinding(image)) return HazardResult();
1017 subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, offset, extent);
1018 const auto address_type = ImageAddressType(image);
1019 const auto base_address = ResourceBaseAddress(image);
1020 for (; range_gen->non_empty(); ++range_gen) {
1021 HazardResult hazard = DetectHazard(address_type, detector, (*range_gen + base_address), options);
1022 if (hazard.hazard) return hazard;
1023 }
1024 return HazardResult();
1025}
1026
John Zulauf540266b2020-04-06 18:54:53 -06001027HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
1028 const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
1029 const VkExtent3D &extent) const {
John Zulauf5c5e88d2019-12-26 11:22:02 -07001030 VkImageSubresourceRange subresource_range = {subresource.aspectMask, subresource.mipLevel, 1, subresource.baseArrayLayer,
1031 subresource.layerCount};
John Zulauf1507ee42020-05-18 11:33:09 -06001032 return DetectHazard(image, current_usage, subresource_range, offset, extent);
1033}
1034
1035HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
1036 const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
1037 const VkExtent3D &extent) const {
John Zulauf69133422020-05-20 14:55:53 -06001038 HazardDetector detector(current_usage);
1039 return DetectHazard(detector, image, subresource_range, offset, extent, DetectOptions::kDetectAll);
1040}
1041
1042HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
1043 const VkImageSubresourceRange &subresource_range, const SyncOrderingBarrier &ordering,
1044 const VkOffset3D &offset, const VkExtent3D &extent) const {
1045 HazardDetectorWithOrdering detector(current_usage, ordering);
1046 return DetectHazard(detector, image, subresource_range, offset, extent, DetectOptions::kDetectAll);
John Zulauf9cb530d2019-09-30 14:14:10 -06001047}
1048
John Zulaufb027cdb2020-05-21 14:25:22 -06001049// Some common code for looking at attachments. If anything is wrong we return no hazard; core validation
1050// should have already reported the issue regarding an invalid attachment entry.
1051HazardResult AccessContext::DetectHazard(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage,
1052 const SyncOrderingBarrier &ordering, const VkOffset3D &offset, const VkExtent3D &extent,
1053 VkImageAspectFlags aspect_mask) const {
1054 if (view != nullptr) {
1055 const IMAGE_STATE *image = view->image_state.get();
1056 if (image != nullptr) {
1057 auto *detect_range = &view->normalized_subresource_range;
1058 VkImageSubresourceRange masked_range;
1059 if (aspect_mask) { // If present and non-zero, restrict the normalized range to aspects present in aspect_mask
1060 masked_range = view->normalized_subresource_range;
1061 masked_range.aspectMask = aspect_mask & masked_range.aspectMask;
1062 detect_range = &masked_range;
1063 }
1064
1065 // NOTE: The range encoding code is not robust to invalid ranges, so guard against the aspect_mask restriction above having emptied the aspectMask
1066 if (detect_range->aspectMask) {
1067 return DetectHazard(*image, current_usage, *detect_range, ordering, offset, extent);
1068 }
1069 }
1070 }
1071 return HazardResult();
1072}
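// Illustrative example: for a combined depth/stencil attachment view, passing aspect_mask == VK_IMAGE_ASPECT_DEPTH_BIT
// above restricts hazard detection to the depth aspect of the view's normalized subresource range, while
// aspect_mask == 0 leaves the full normalized range in place.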
John Zulauf43cc7462020-12-03 12:33:12 -07001073
John Zulauf3d84f1b2020-03-09 13:33:25 -06001074class BarrierHazardDetector {
1075 public:
1076 BarrierHazardDetector(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
1077 SyncStageAccessFlags src_access_scope)
1078 : usage_index_(usage_index), src_exec_scope_(src_exec_scope), src_access_scope_(src_access_scope) {}
1079
John Zulauf5f13a792020-03-10 07:31:21 -06001080 HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const {
1081 return pos->second.DetectBarrierHazard(usage_index_, src_exec_scope_, src_access_scope_);
John Zulauf0cb5be22020-01-23 12:18:22 -07001082 }
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -07001083 HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos, const ResourceUsageTag &start_tag) const {
John Zulauf3d84f1b2020-03-09 13:33:25 -06001084 // Async barrier hazard detection can use the same path, as the usage index is not IsRead but IsWrite
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -07001085 return pos->second.DetectAsyncHazard(usage_index_, start_tag);
John Zulauf3d84f1b2020-03-09 13:33:25 -06001086 }
1087
1088 private:
1089 SyncStageAccessIndex usage_index_;
1090 VkPipelineStageFlags src_exec_scope_;
1091 SyncStageAccessFlags src_access_scope_;
1092};
1093
John Zulauf16adfc92020-04-08 10:28:33 -06001094HazardResult AccessContext::DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07001095 const SyncStageAccessFlags &src_access_scope,
John Zulauf355e49b2020-04-24 15:11:15 -06001096 const VkImageSubresourceRange &subresource_range,
John Zulauf43cc7462020-12-03 12:33:12 -07001097 const DetectOptions options) const {
John Zulauf69133422020-05-20 14:55:53 -06001098 BarrierHazardDetector detector(SyncStageAccessIndex::SYNC_IMAGE_LAYOUT_TRANSITION, src_exec_scope, src_access_scope);
1099 VkOffset3D zero_offset = {0, 0, 0};
1100 return DetectHazard(detector, image, subresource_range, zero_offset, image.createInfo.extent, options);
John Zulauf0cb5be22020-01-23 12:18:22 -07001101}
1102
John Zulauf355e49b2020-04-24 15:11:15 -06001103HazardResult AccessContext::DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07001104 const SyncStageAccessFlags &src_stage_accesses,
John Zulauf355e49b2020-04-24 15:11:15 -06001105 const VkImageMemoryBarrier &barrier) const {
1106 auto subresource_range = NormalizeSubresourceRange(image.createInfo, barrier.subresourceRange);
1107 const auto src_access_scope = SyncStageAccess::AccessScope(src_stage_accesses, barrier.srcAccessMask);
1108 return DetectImageBarrierHazard(image, src_exec_scope, src_access_scope, subresource_range, kDetectAll);
1109}
1110
John Zulauf9cb530d2019-09-30 14:14:10 -06001111template <typename Flags, typename Map>
1112SyncStageAccessFlags AccessScopeImpl(Flags flag_mask, const Map &map) {
1113 SyncStageAccessFlags scope = 0;
1114 for (const auto &bit_scope : map) {
1115 if (flag_mask < bit_scope.first) break;
1116
1117 if (flag_mask & bit_scope.first) {
1118 scope |= bit_scope.second;
1119 }
1120 }
1121 return scope;
1122}
1123
1124SyncStageAccessFlags SyncStageAccess::AccessScopeByStage(VkPipelineStageFlags stages) {
1125 return AccessScopeImpl(stages, syncStageAccessMaskByStageBit);
1126}
1127
1128SyncStageAccessFlags SyncStageAccess::AccessScopeByAccess(VkAccessFlags accesses) {
1129 return AccessScopeImpl(accesses, syncStageAccessMaskByAccessBit);
1130}
1131
1132// Getting from stage mask and access mask to stage/acess masks is something we need to be good at...
1133SyncStageAccessFlags SyncStageAccess::AccessScope(VkPipelineStageFlags stages, VkAccessFlags accesses) {
John Zulauf5f13a792020-03-10 07:31:21 -06001134 // The access scope is the intersection of all stage/access types possible for the enabled stages and the enabled
1135 // accesses (after factoring out common terms, the union of per-stage stage/access intersections equals the intersection
1136 // of the union of all stage/access types for all the stages with the same union for the access mask)...
John Zulauf9cb530d2019-09-30 14:14:10 -06001137 return AccessScopeByStage(stages) & AccessScopeByAccess(accesses);
1138}
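// Illustrative example (bit names as used elsewhere in this file; the exact scope contents are an assumption):
// AccessScope(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_ACCESS_INPUT_ATTACHMENT_READ_BIT) intersects the
// fragment-shader stage scope with the input-attachment-read access scope, leaving essentially just the
// SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ bit.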
1139
1140template <typename Action>
John Zulauf5c5e88d2019-12-26 11:22:02 -07001141void UpdateMemoryAccessState(ResourceAccessRangeMap *accesses, const ResourceAccessRange &range, const Action &action) {
John Zulauf7635de32020-05-29 17:14:15 -06001142 // TODO: Optimization for operations that do a pure overwrite (i.e. WRITE usages which rewrite the state, vs READ usages
1143 // that do incremental updates)
John Zulauf9cb530d2019-09-30 14:14:10 -06001144 auto pos = accesses->lower_bound(range);
1145 if (pos == accesses->end() || !pos->first.intersects(range)) {
1146 // The range is empty, fill it with a default value.
1147 pos = action.Infill(accesses, pos, range);
1148 } else if (range.begin < pos->first.begin) {
1149 // Leading empty space, infill
John Zulauf5c5e88d2019-12-26 11:22:02 -07001150 pos = action.Infill(accesses, pos, ResourceAccessRange(range.begin, pos->first.begin));
John Zulauf9cb530d2019-09-30 14:14:10 -06001151 } else if (pos->first.begin < range.begin) {
1152 // Trim the beginning if needed
1153 pos = accesses->split(pos, range.begin, sparse_container::split_op_keep_both());
1154 ++pos;
1155 }
1156
1157 const auto the_end = accesses->end();
1158 while ((pos != the_end) && pos->first.intersects(range)) {
1159 if (pos->first.end > range.end) {
1160 pos = accesses->split(pos, range.end, sparse_container::split_op_keep_both());
1161 }
1162
1163 pos = action(accesses, pos);
1164 if (pos == the_end) break;
1165
1166 auto next = pos;
1167 ++next;
1168 if ((pos->first.end < range.end) && (next != the_end) && !next->first.is_subsequent_to(pos->first)) {
1169 // Need to infill if next is disjoint
1170 VkDeviceSize limit = (next == the_end) ? range.end : std::min(range.end, next->first.begin);
John Zulauf5c5e88d2019-12-26 11:22:02 -07001171 ResourceAccessRange new_range(pos->first.end, limit);
John Zulauf9cb530d2019-09-30 14:14:10 -06001172 next = action.Infill(accesses, next, new_range);
1173 }
1174 pos = next;
1175 }
1176}
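// Illustrative walk (assuming half-open ResourceAccessRange intervals): with existing entries [0,4) and [8,12)
// and an update over [2,10), the loop above splits [0,4) at 2, applies the action to [2,4), infills the gap
// [4,8) (for functors whose Infill creates entries), applies the action to the infilled entry, splits [8,12)
// at 10, applies the action to [8,10), and leaves [10,12) untouched.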
1177
1178struct UpdateMemoryAccessStateFunctor {
John Zulauf5c5e88d2019-12-26 11:22:02 -07001179 using Iterator = ResourceAccessRangeMap::iterator;
1180 Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const {
John Zulauf5f13a792020-03-10 07:31:21 -06001181 // this is only called on gaps, and never returns a gap.
1182 ResourceAccessState default_state;
John Zulauf16adfc92020-04-08 10:28:33 -06001183 context.ResolvePreviousAccess(type, range, accesses, &default_state);
John Zulauf5f13a792020-03-10 07:31:21 -06001184 return accesses->lower_bound(range);
John Zulauf9cb530d2019-09-30 14:14:10 -06001185 }
John Zulauf5f13a792020-03-10 07:31:21 -06001186
John Zulauf5c5e88d2019-12-26 11:22:02 -07001187 Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
John Zulauf9cb530d2019-09-30 14:14:10 -06001188 auto &access_state = pos->second;
1189 access_state.Update(usage, tag);
1190 return pos;
1191 }
1192
John Zulauf43cc7462020-12-03 12:33:12 -07001193 UpdateMemoryAccessStateFunctor(AccessAddressType type_, const AccessContext &context_, SyncStageAccessIndex usage_,
John Zulauf540266b2020-04-06 18:54:53 -06001194 const ResourceUsageTag &tag_)
John Zulauf16adfc92020-04-08 10:28:33 -06001195 : type(type_), context(context_), usage(usage_), tag(tag_) {}
John Zulauf43cc7462020-12-03 12:33:12 -07001196 const AccessAddressType type;
John Zulauf540266b2020-04-06 18:54:53 -06001197 const AccessContext &context;
John Zulauf16adfc92020-04-08 10:28:33 -06001198 const SyncStageAccessIndex usage;
John Zulauf9cb530d2019-09-30 14:14:10 -06001199 const ResourceUsageTag &tag;
1200};
1201
John Zulauf89311b42020-09-29 16:28:47 -06001202// This functor applies a single barrier, updating the "pending state" in each touched memory range, but does not
1203// resolve the pending state. Suitable for processing Image and Buffer barriers from PipelineBarriers or Events
1204class ApplyBarrierFunctor {
1205 public:
John Zulauf5c5e88d2019-12-26 11:22:02 -07001206 using Iterator = ResourceAccessRangeMap::iterator;
1207 inline Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const { return pos; }
John Zulauf9cb530d2019-09-30 14:14:10 -06001208
John Zulauf5c5e88d2019-12-26 11:22:02 -07001209 Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
John Zulauf9cb530d2019-09-30 14:14:10 -06001210 auto &access_state = pos->second;
John Zulauf89311b42020-09-29 16:28:47 -06001211 access_state.ApplyBarrier(barrier_, layout_transition_);
John Zulauf9cb530d2019-09-30 14:14:10 -06001212 return pos;
1213 }
1214
John Zulauf89311b42020-09-29 16:28:47 -06001215 ApplyBarrierFunctor(const SyncBarrier &barrier, bool layout_transition)
1216 : barrier_(barrier), layout_transition_(layout_transition) {}
John Zulauf9cb530d2019-09-30 14:14:10 -06001217
John Zulauf89311b42020-09-29 16:28:47 -06001218 private:
1219 const SyncBarrier barrier_;
1220 const bool layout_transition_;
John Zulauf9cb530d2019-09-30 14:14:10 -06001221};
1222
John Zulauf89311b42020-09-29 16:28:47 -06001223// This functor applies a collection of barriers, updating the "pending state" in each touched memory range, and optionally
1224// resolves the pending state. Suitable for processing Global memory barriers, or Subpass Barriers when the "final" barrier
1225// of a collection is known/present.
1226class ApplyBarrierOpsFunctor {
1227 public:
John Zulauf5c5e88d2019-12-26 11:22:02 -07001228 using Iterator = ResourceAccessRangeMap::iterator;
1229 inline Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const { return pos; }
John Zulauf9cb530d2019-09-30 14:14:10 -06001230
John Zulauf89311b42020-09-29 16:28:47 -06001231 struct BarrierOp {
1232 SyncBarrier barrier;
1233 bool layout_transition;
1234 BarrierOp(const SyncBarrier &barrier_, bool layout_transition_)
1235 : barrier(barrier_), layout_transition(layout_transition_) {}
1236 BarrierOp() = default;
1237 };
John Zulauf5c5e88d2019-12-26 11:22:02 -07001238 Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
John Zulauf9cb530d2019-09-30 14:14:10 -06001239 auto &access_state = pos->second;
John Zulauf89311b42020-09-29 16:28:47 -06001240 for (const auto op : barrier_ops_) {
1241 access_state.ApplyBarrier(op.barrier, op.layout_transition);
1242 }
John Zulauf9cb530d2019-09-30 14:14:10 -06001243
John Zulauf89311b42020-09-29 16:28:47 -06001244 if (resolve_) {
1245 // If this is the last (or only) batch, we can do the pending resolve as the last step in this operation to avoid
1246 // another walk
1247 access_state.ApplyPendingBarriers(tag_);
John Zulauf9cb530d2019-09-30 14:14:10 -06001248 }
1249 return pos;
1250 }
1251
John Zulauf89311b42020-09-29 16:28:47 -06001252 // A valid tag is required IFF any of the barrier ops is a layout transition, as transitions are write ops
1253 ApplyBarrierOpsFunctor(bool resolve, size_t size_hint, const ResourceUsageTag &tag)
1254 : resolve_(resolve), barrier_ops_(), tag_(tag) {
1255 if (size_hint) {
1256 barrier_ops_.reserve(size_hint);
1257 }
1258 };
1259
1260 // A valid tag is required IFF layout_transition is true, as transitions are write ops
1261 ApplyBarrierOpsFunctor(bool resolve, const std::vector<SyncBarrier> &barriers, bool layout_transition,
1262 const ResourceUsageTag &tag)
John Zulaufb02c1eb2020-10-06 16:33:36 -06001263 : resolve_(resolve), barrier_ops_(), tag_(tag) {
1264 barrier_ops_.reserve(barriers.size());
John Zulauf89311b42020-09-29 16:28:47 -06001265 for (const auto &barrier : barriers) {
1266 barrier_ops_.emplace_back(barrier, layout_transition);
John Zulauf9cb530d2019-09-30 14:14:10 -06001267 }
1268 }
1269
John Zulauf89311b42020-09-29 16:28:47 -06001270 void PushBack(const SyncBarrier &barrier, bool layout_transition) { barrier_ops_.emplace_back(barrier, layout_transition); }
1271
1272 void PushBack(const std::vector<SyncBarrier> &barriers, bool layout_transition) {
1273 barrier_ops_.reserve(barrier_ops_.size() + barriers.size());
1274 for (const auto &barrier : barriers) {
1275 barrier_ops_.emplace_back(barrier, layout_transition);
1276 }
1277 }
1278
1279 private:
1280 bool resolve_;
1281 std::vector<BarrierOp> barrier_ops_;
1282 const ResourceUsageTag &tag_;
John Zulauf9cb530d2019-09-30 14:14:10 -06001283};
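// Typical use (see RecordLayoutTransitions below): construct with resolve == true and a size hint or an initial
// barrier vector, optionally PushBack more barriers, then hand the functor to ApplyGlobalBarriers /
// UpdateResourceAccess to walk the access maps. Rough sketch, assuming a SyncBarrier 'barrier' and a
// ResourceUsageTag 'tag' are in scope:
//   ApplyBarrierOpsFunctor barrier_ops(true /* resolve */, 1 /* size_hint */, tag);
//   barrier_ops.PushBack(barrier, false /* layout_transition */);
//   access_context->ApplyGlobalBarriers(barrier_ops);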
1284
John Zulauf43cc7462020-12-03 12:33:12 -07001285void AccessContext::UpdateAccessState(AccessAddressType type, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
John Zulauf355e49b2020-04-24 15:11:15 -06001286 const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001287 UpdateMemoryAccessStateFunctor action(type, *this, current_usage, tag);
1288 UpdateMemoryAccessState(&GetAccessStateMap(type), range, action);
John Zulauf3d84f1b2020-03-09 13:33:25 -06001289}
1290
John Zulauf16adfc92020-04-08 10:28:33 -06001291void AccessContext::UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage,
John Zulauf355e49b2020-04-24 15:11:15 -06001292 const ResourceAccessRange &range, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001293 if (!SimpleBinding(buffer)) return;
1294 const auto base_address = ResourceBaseAddress(buffer);
John Zulauf43cc7462020-12-03 12:33:12 -07001295 UpdateAccessState(AccessAddressType::kLinear, current_usage, range + base_address, tag);
John Zulauf16adfc92020-04-08 10:28:33 -06001296}
John Zulauf355e49b2020-04-24 15:11:15 -06001297
John Zulauf540266b2020-04-06 18:54:53 -06001298void AccessContext::UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
John Zulauf355e49b2020-04-24 15:11:15 -06001299 const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
John Zulauf540266b2020-04-06 18:54:53 -06001300 const VkExtent3D &extent, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001301 if (!SimpleBinding(image)) return;
locke-lunargae26eac2020-04-16 15:29:05 -06001302 subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, offset, extent);
John Zulauf16adfc92020-04-08 10:28:33 -06001303 const auto address_type = ImageAddressType(image);
1304 const auto base_address = ResourceBaseAddress(image);
1305 UpdateMemoryAccessStateFunctor action(address_type, *this, current_usage, tag);
John Zulauf5f13a792020-03-10 07:31:21 -06001306 for (; range_gen->non_empty(); ++range_gen) {
John Zulauf16adfc92020-04-08 10:28:33 -06001307 UpdateMemoryAccessState(&GetAccessStateMap(address_type), (*range_gen + base_address), action);
John Zulauf5f13a792020-03-10 07:31:21 -06001308 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06001309}
John Zulauf7635de32020-05-29 17:14:15 -06001310void AccessContext::UpdateAccessState(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage, const VkOffset3D &offset,
1311 const VkExtent3D &extent, VkImageAspectFlags aspect_mask, const ResourceUsageTag &tag) {
1312 if (view != nullptr) {
1313 const IMAGE_STATE *image = view->image_state.get();
1314 if (image != nullptr) {
1315 auto *update_range = &view->normalized_subresource_range;
1316 VkImageSubresourceRange masked_range;
1317 if (aspect_mask) { // If present and non-zero, restrict the normalized range to aspects present in aspect_mask
1318 masked_range = view->normalized_subresource_range;
1319 masked_range.aspectMask = aspect_mask & masked_range.aspectMask;
1320 update_range = &masked_range;
1321 }
1322 UpdateAccessState(*image, current_usage, *update_range, offset, extent, tag);
1323 }
1324 }
1325}
John Zulauf3d84f1b2020-03-09 13:33:25 -06001326
John Zulauf355e49b2020-04-24 15:11:15 -06001327void AccessContext::UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
1328 const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
1329 const VkExtent3D &extent, const ResourceUsageTag &tag) {
John Zulauf355e49b2020-04-24 15:11:15 -06001330 VkImageSubresourceRange subresource_range = {subresource.aspectMask, subresource.mipLevel, 1, subresource.baseArrayLayer,
1331 subresource.layerCount};
1332 UpdateAccessState(image, current_usage, subresource_range, offset, extent, tag);
1333}
1334
John Zulauf540266b2020-04-06 18:54:53 -06001335template <typename Action>
John Zulauf89311b42020-09-29 16:28:47 -06001336void AccessContext::UpdateResourceAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action) {
John Zulauf16adfc92020-04-08 10:28:33 -06001337 if (!SimpleBinding(buffer)) return;
1338 const auto base_address = ResourceBaseAddress(buffer);
John Zulauf43cc7462020-12-03 12:33:12 -07001339 UpdateMemoryAccessState(&GetAccessStateMap(AccessAddressType::kLinear), (range + base_address), action);
John Zulauf540266b2020-04-06 18:54:53 -06001340}
1341
1342template <typename Action>
John Zulauf89311b42020-09-29 16:28:47 -06001343void AccessContext::UpdateResourceAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
1344 const Action action) {
John Zulauf16adfc92020-04-08 10:28:33 -06001345 if (!SimpleBinding(image)) return;
1346 const auto address_type = ImageAddressType(image);
1347 auto *accesses = &GetAccessStateMap(address_type);
John Zulauf540266b2020-04-06 18:54:53 -06001348
locke-lunargae26eac2020-04-16 15:29:05 -06001349 subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, {0, 0, 0},
locke-lunarg5f7d3c62020-04-07 00:10:39 -06001350 image.createInfo.extent);
John Zulauf540266b2020-04-06 18:54:53 -06001351
John Zulauf16adfc92020-04-08 10:28:33 -06001352 const auto base_address = ResourceBaseAddress(image);
John Zulauf540266b2020-04-06 18:54:53 -06001353 for (; range_gen->non_empty(); ++range_gen) {
John Zulauf16adfc92020-04-08 10:28:33 -06001354 UpdateMemoryAccessState(accesses, (*range_gen + base_address), action);
John Zulauf540266b2020-04-06 18:54:53 -06001355 }
1356}
1357
John Zulauf7635de32020-05-29 17:14:15 -06001358void AccessContext::UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
1359 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
1360 const ResourceUsageTag &tag) {
1361 UpdateStateResolveAction update(*this, tag);
1362 ResolveOperation(update, rp_state, render_area, attachment_views, subpass);
1363}
1364
John Zulaufaff20662020-06-01 14:07:58 -06001365void AccessContext::UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
1366 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, uint32_t subpass,
1367 const ResourceUsageTag &tag) {
1368 const auto *attachment_ci = rp_state.createInfo.pAttachments;
1369 VkExtent3D extent = CastTo3D(render_area.extent);
1370 VkOffset3D offset = CastTo3D(render_area.offset);
1371
1372 for (uint32_t i = 0; i < rp_state.createInfo.attachmentCount; i++) {
1373 if (rp_state.attachment_last_subpass[i] == subpass) {
1374 if (attachment_views[i] == nullptr) continue; // UNUSED
1375 const auto &view = *attachment_views[i];
1376 const IMAGE_STATE *image = view.image_state.get();
1377 if (image == nullptr) continue;
1378
1379 const auto &ci = attachment_ci[i];
1380 const bool has_depth = FormatHasDepth(ci.format);
1381 const bool has_stencil = FormatHasStencil(ci.format);
1382 const bool is_color = !(has_depth || has_stencil);
1383 const bool store_op_stores = ci.storeOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
1384
1385 if (is_color && store_op_stores) {
1386 UpdateAccessState(*image, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, view.normalized_subresource_range,
1387 offset, extent, tag);
1388 } else {
1389 auto update_range = view.normalized_subresource_range;
1390 if (has_depth && store_op_stores) {
1391 update_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
1392 UpdateAccessState(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, update_range, offset, extent,
1393 tag);
1394 }
1395 const bool stencil_op_stores = ci.stencilStoreOp != VK_ATTACHMENT_STORE_OP_NONE_QCOM;
1396 if (has_stencil && stencil_op_stores) {
1397 update_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
1398 UpdateAccessState(*image, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, update_range, offset, extent,
1399 tag);
1400 }
1401 }
1402 }
1403 }
1404}
1405
John Zulauf540266b2020-04-06 18:54:53 -06001406template <typename Action>
1407void AccessContext::ApplyGlobalBarriers(const Action &barrier_action) {
1408 // Note: Barriers do *not* cross context boundaries, applying to accessess within.... (at least for renderpass subpasses)
John Zulauf16adfc92020-04-08 10:28:33 -06001409 for (const auto address_type : kAddressTypes) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001410 UpdateMemoryAccessState(&GetAccessStateMap(address_type), kFullRange, barrier_action);
John Zulauf540266b2020-04-06 18:54:53 -06001411 }
1412}
1413
1414void AccessContext::ResolveChildContexts(const std::vector<AccessContext> &contexts) {
John Zulauf540266b2020-04-06 18:54:53 -06001415 for (uint32_t subpass_index = 0; subpass_index < contexts.size(); subpass_index++) {
1416 auto &context = contexts[subpass_index];
John Zulaufb02c1eb2020-10-06 16:33:36 -06001417 ApplyTrackbackBarriersAction barrier_action(context.GetDstExternalTrackBack().barriers);
John Zulauf16adfc92020-04-08 10:28:33 -06001418 for (const auto address_type : kAddressTypes) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001419 context.ResolveAccessRange(address_type, kFullRange, barrier_action, &GetAccessStateMap(address_type), nullptr, false);
John Zulauf540266b2020-04-06 18:54:53 -06001420 }
1421 }
1422}
1423
John Zulauf355e49b2020-04-24 15:11:15 -06001424// Suitable only for *subpass* access contexts
John Zulauf7635de32020-05-29 17:14:15 -06001425HazardResult AccessContext::DetectSubpassTransitionHazard(const TrackBack &track_back, const IMAGE_VIEW_STATE *attach_view) const {
John Zulauf355e49b2020-04-24 15:11:15 -06001426 if (!attach_view) return HazardResult();
1427 const auto image_state = attach_view->image_state.get();
1428 if (!image_state) return HazardResult();
1429
John Zulauf355e49b2020-04-24 15:11:15 -06001430 // We should never ask for a transition from a context we don't have
John Zulauf7635de32020-05-29 17:14:15 -06001431 assert(track_back.context);
John Zulauf355e49b2020-04-24 15:11:15 -06001432
1433 // Do the detection against the specific prior context independent of other contexts. (Synchronous only)
John Zulaufa0a98292020-09-18 09:30:10 -06001434 // Hazard detection for the transition can be against the merge of the barriers (it only uses src_...)
1435 const auto merged_barrier = MergeBarriers(track_back.barriers);
1436 HazardResult hazard =
1437 track_back.context->DetectImageBarrierHazard(*image_state, merged_barrier.src_exec_scope, merged_barrier.src_access_scope,
1438 attach_view->normalized_subresource_range, kDetectPrevious);
John Zulauf355e49b2020-04-24 15:11:15 -06001439 if (!hazard.hazard) {
1440 // The Async hazard check is against the current context's async set.
John Zulaufa0a98292020-09-18 09:30:10 -06001441 hazard = DetectImageBarrierHazard(*image_state, merged_barrier.src_exec_scope, merged_barrier.src_access_scope,
John Zulauf355e49b2020-04-24 15:11:15 -06001442 attach_view->normalized_subresource_range, kDetectAsync);
1443 }
John Zulaufa0a98292020-09-18 09:30:10 -06001444
John Zulauf355e49b2020-04-24 15:11:15 -06001445 return hazard;
1446}
1447
John Zulaufb02c1eb2020-10-06 16:33:36 -06001448void AccessContext::RecordLayoutTransitions(const RENDER_PASS_STATE &rp_state, uint32_t subpass,
1449 const std::vector<const IMAGE_VIEW_STATE *> &attachment_views,
1450 const ResourceUsageTag &tag) {
1451 const auto &transitions = rp_state.subpass_transitions[subpass];
John Zulauf646cc292020-10-23 09:16:45 -06001452 const ResourceAccessState empty_infill;
John Zulaufb02c1eb2020-10-06 16:33:36 -06001453 for (const auto &transition : transitions) {
1454 const auto prev_pass = transition.prev_pass;
1455 const auto attachment_view = attachment_views[transition.attachment];
1456 if (!attachment_view) continue;
1457 const auto *image = attachment_view->image_state.get();
1458 if (!image) continue;
1459 if (!SimpleBinding(*image)) continue;
1460
1461 const auto *trackback = GetTrackBackFromSubpass(prev_pass);
1462 assert(trackback);
1463
1464 // Import the attachments into the current context
1465 const auto *prev_context = trackback->context;
1466 assert(prev_context);
1467 const auto address_type = ImageAddressType(*image);
1468 auto &target_map = GetAccessStateMap(address_type);
1469 ApplySubpassTransitionBarriersAction barrier_action(trackback->barriers);
1470 prev_context->ResolveAccessRange(*image, attachment_view->normalized_subresource_range, barrier_action, address_type,
John Zulauf646cc292020-10-23 09:16:45 -06001471 &target_map, &empty_infill);
John Zulaufb02c1eb2020-10-06 16:33:36 -06001472 }
1473
John Zulauf86356ca2020-10-19 11:46:41 -06001474 // If there were no transitions skip this global map walk
1475 if (transitions.size()) {
1476 ApplyBarrierOpsFunctor apply_pending_action(true /* resolve */, 0, tag);
1477 ApplyGlobalBarriers(apply_pending_action);
1478 }
John Zulaufb02c1eb2020-10-06 16:33:36 -06001479}
1480
John Zulauf355e49b2020-04-24 15:11:15 -06001481// Class CommandBufferAccessContext: Keep track of resource access state information for a specific command buffer
1482bool CommandBufferAccessContext::ValidateBeginRenderPass(const RENDER_PASS_STATE &rp_state,
1483
1484 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08001485 const VkSubpassBeginInfo *pSubpassBeginInfo, const char *func_name) const {
John Zulauf355e49b2020-04-24 15:11:15 -06001486 // Check if any of the layout transitions are hazardous.... but we don't have the renderpass context to work with, so we construct a temporary context to validate against (see below)
1487 bool skip = false;
John Zulauf355e49b2020-04-24 15:11:15 -06001488
John Zulauf86356ca2020-10-19 11:46:41 -06001489 assert(pRenderPassBegin);
1490 if (nullptr == pRenderPassBegin) return skip;
John Zulauf355e49b2020-04-24 15:11:15 -06001491
John Zulauf86356ca2020-10-19 11:46:41 -06001492 const uint32_t subpass = 0;
John Zulauf355e49b2020-04-24 15:11:15 -06001493
John Zulauf86356ca2020-10-19 11:46:41 -06001494 // Construct the state we can use to validate against... (since validation is const and RecordCmdBeginRenderPass
1495 // hasn't happened yet)
1496 const std::vector<AccessContext> empty_context_vector;
1497 AccessContext temp_context(subpass, queue_flags_, rp_state.subpass_dependencies, empty_context_vector,
1498 const_cast<AccessContext *>(&cb_access_context_));
John Zulauf355e49b2020-04-24 15:11:15 -06001499
John Zulauf86356ca2020-10-19 11:46:41 -06001500 // Create a view list
1501 const auto fb_state = sync_state_->Get<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer);
1502 assert(fb_state);
1503 if (nullptr == fb_state) return skip;
1504 // NOTE: Must not use COMMAND_BUFFER_STATE variant of this as RecordCmdBeginRenderPass hasn't run and thus
1505 // the activeRenderPass.* fields haven't been set.
1506 const auto views = sync_state_->GetAttachmentViews(*pRenderPassBegin, *fb_state);
1507
1508 // Validate transitions
1509 skip |= temp_context.ValidateLayoutTransitions(*sync_state_, rp_state, pRenderPassBegin->renderArea, subpass, views, func_name);
1510
1511 // Validate load operations if there were no layout transition hazards
1512 if (!skip) {
1513 temp_context.RecordLayoutTransitions(rp_state, subpass, views, kCurrentCommandTag);
1514 skip |= temp_context.ValidateLoadOperation(*sync_state_, rp_state, pRenderPassBegin->renderArea, subpass, views, func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06001515 }
John Zulauf86356ca2020-10-19 11:46:41 -06001516
John Zulauf355e49b2020-04-24 15:11:15 -06001517 return skip;
1518}
1519
locke-lunarg61870c22020-06-09 14:51:50 -06001520bool CommandBufferAccessContext::ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint,
1521 const char *func_name) const {
1522 bool skip = false;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001523 const PIPELINE_STATE *pipe = nullptr;
locke-lunarg61870c22020-06-09 14:51:50 -06001524 const std::vector<LAST_BOUND_STATE::PER_SET> *per_sets = nullptr;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001525 GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(*cb_state_.get(), pipelineBindPoint, &pipe, &per_sets);
1526 if (!pipe || !per_sets) {
locke-lunarg61870c22020-06-09 14:51:50 -06001527 return skip;
1528 }
1529
1530 using DescriptorClass = cvdescriptorset::DescriptorClass;
1531 using BufferDescriptor = cvdescriptorset::BufferDescriptor;
1532 using ImageDescriptor = cvdescriptorset::ImageDescriptor;
1533 using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
1534 using TexelDescriptor = cvdescriptorset::TexelDescriptor;
1535
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001536 for (const auto &stage_state : pipe->stage_state) {
1537 if (stage_state.stage_flag == VK_SHADER_STAGE_FRAGMENT_BIT && pipe->graphicsPipelineCI.pRasterizationState &&
1538 pipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable) {
locke-lunarge9f1cdf2020-06-12 12:28:57 -06001539 continue;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001540 }
locke-lunarg61870c22020-06-09 14:51:50 -06001541 for (const auto &set_binding : stage_state.descriptor_uses) {
1542 cvdescriptorset::DescriptorSet *descriptor_set = (*per_sets)[set_binding.first.first].bound_descriptor_set;
1543 cvdescriptorset::DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(),
1544 set_binding.first.second);
1545 const auto descriptor_type = binding_it.GetType();
1546 cvdescriptorset::IndexRange index_range = binding_it.GetGlobalIndexRange();
1547 auto array_idx = 0;
1548
1549 if (binding_it.IsVariableDescriptorCount()) {
1550 index_range.end = index_range.start + descriptor_set->GetVariableDescriptorCount();
1551 }
1552 SyncStageAccessIndex sync_index =
1553 GetSyncStageAccessIndexsByDescriptorSet(descriptor_type, set_binding.second, stage_state.stage_flag);
1554
1555 for (uint32_t i = index_range.start; i < index_range.end; ++i, ++array_idx) {
1556 uint32_t index = i - index_range.start;
1557 const auto *descriptor = descriptor_set->GetDescriptorFromGlobalIndex(i);
1558 switch (descriptor->GetClass()) {
1559 case DescriptorClass::ImageSampler:
1560 case DescriptorClass::Image: {
1561 const IMAGE_VIEW_STATE *img_view_state = nullptr;
locke-lunarg7cc0ead2020-07-17 14:29:16 -06001562 VkImageLayout image_layout;
locke-lunarg61870c22020-06-09 14:51:50 -06001563 if (descriptor->GetClass() == DescriptorClass::ImageSampler) {
locke-lunarg7cc0ead2020-07-17 14:29:16 -06001564 const auto image_sampler_descriptor = static_cast<const ImageSamplerDescriptor *>(descriptor);
1565 img_view_state = image_sampler_descriptor->GetImageViewState();
1566 image_layout = image_sampler_descriptor->GetImageLayout();
locke-lunarg61870c22020-06-09 14:51:50 -06001567 } else {
locke-lunarg7cc0ead2020-07-17 14:29:16 -06001568 const auto image_descriptor = static_cast<const ImageDescriptor *>(descriptor);
1569 img_view_state = image_descriptor->GetImageViewState();
1570 image_layout = image_descriptor->GetImageLayout();
locke-lunarg61870c22020-06-09 14:51:50 -06001571 }
1572 if (!img_view_state) continue;
1573 const IMAGE_STATE *img_state = img_view_state->image_state.get();
1574 VkExtent3D extent = {};
1575 VkOffset3D offset = {};
1576 if (sync_index == SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ) {
1577 extent = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.extent);
1578 offset = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.offset);
1579 } else {
1580 extent = img_state->createInfo.extent;
1581 }
John Zulauf361fb532020-07-22 10:45:39 -06001582 HazardResult hazard;
1583 const auto &subresource_range = img_view_state->normalized_subresource_range;
1584 if (descriptor_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) {
1585 // Input attachments are subject to raster ordering rules
1586 hazard = current_context_->DetectHazard(*img_state, sync_index, subresource_range,
1587 kAttachmentRasterOrder, offset, extent);
1588 } else {
1589 hazard = current_context_->DetectHazard(*img_state, sync_index, subresource_range, offset, extent);
1590 }
John Zulauf33fc1d52020-07-17 11:01:10 -06001591 if (hazard.hazard && !sync_state_->SupressedBoundDescriptorWAW(hazard)) {
John Zulauf1dae9192020-06-16 15:46:44 -06001592 skip |= sync_state_->LogError(
1593 img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
locke-lunarg7cc0ead2020-07-17 14:29:16 -06001594 "%s: Hazard %s for %s, in %s, and %s, %s, type: %s, imageLayout: %s, binding #%" PRIu32
1595 ", index %" PRIu32 ". Access info %s.",
John Zulauf1dae9192020-06-16 15:46:44 -06001596 func_name, string_SyncHazard(hazard.hazard),
1597 sync_state_->report_data->FormatHandle(img_view_state->image_view).c_str(),
1598 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(),
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001599 sync_state_->report_data->FormatHandle(pipe->pipeline).c_str(),
locke-lunarg7cc0ead2020-07-17 14:29:16 -06001600 sync_state_->report_data->FormatHandle(descriptor_set->GetSet()).c_str(),
1601 string_VkDescriptorType(descriptor_type), string_VkImageLayout(image_layout),
1602 set_binding.first.second, index, string_UsageTag(hazard).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001603 }
1604 break;
1605 }
1606 case DescriptorClass::TexelBuffer: {
1607 auto buf_view_state = static_cast<const TexelDescriptor *>(descriptor)->GetBufferViewState();
1608 if (!buf_view_state) continue;
1609 const BUFFER_STATE *buf_state = buf_view_state->buffer_state.get();
John Zulauf3e86bf02020-09-12 10:47:57 -06001610 const ResourceAccessRange range = MakeRange(*buf_view_state);
locke-lunarg61870c22020-06-09 14:51:50 -06001611 auto hazard = current_context_->DetectHazard(*buf_state, sync_index, range);
John Zulauf33fc1d52020-07-17 11:01:10 -06001612 if (hazard.hazard && !sync_state_->SupressedBoundDescriptorWAW(hazard)) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001613 skip |= sync_state_->LogError(
1614 buf_view_state->buffer_view, string_SyncHazardVUID(hazard.hazard),
locke-lunarg7cc0ead2020-07-17 14:29:16 -06001615 "%s: Hazard %s for %s in %s, %s, and %s, type: %s, binding #%d index %d. Access info %s.",
1616 func_name, string_SyncHazard(hazard.hazard),
locke-lunarg88dbb542020-06-23 22:05:42 -06001617 sync_state_->report_data->FormatHandle(buf_view_state->buffer_view).c_str(),
1618 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(),
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001619 sync_state_->report_data->FormatHandle(pipe->pipeline).c_str(),
locke-lunarg7cc0ead2020-07-17 14:29:16 -06001620 sync_state_->report_data->FormatHandle(descriptor_set->GetSet()).c_str(),
1621 string_VkDescriptorType(descriptor_type), set_binding.first.second, index,
1622 string_UsageTag(hazard).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001623 }
1624 break;
1625 }
1626 case DescriptorClass::GeneralBuffer: {
1627 const auto *buffer_descriptor = static_cast<const BufferDescriptor *>(descriptor);
1628 auto buf_state = buffer_descriptor->GetBufferState();
1629 if (!buf_state) continue;
John Zulauf3e86bf02020-09-12 10:47:57 -06001630 const ResourceAccessRange range =
1631 MakeRange(*buf_state, buffer_descriptor->GetOffset(), buffer_descriptor->GetRange());
locke-lunarg61870c22020-06-09 14:51:50 -06001632 auto hazard = current_context_->DetectHazard(*buf_state, sync_index, range);
John Zulauf3ac701a2020-09-07 14:34:41 -06001633 if (hazard.hazard && !sync_state_->SupressedBoundDescriptorWAW(hazard)) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001634 skip |= sync_state_->LogError(
1635 buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
locke-lunarg7cc0ead2020-07-17 14:29:16 -06001636 "%s: Hazard %s for %s in %s, %s, and %s, type: %s, binding #%d index %d. Access info %s.",
1637 func_name, string_SyncHazard(hazard.hazard),
1638 sync_state_->report_data->FormatHandle(buf_state->buffer).c_str(),
locke-lunarg88dbb542020-06-23 22:05:42 -06001639 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(),
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001640 sync_state_->report_data->FormatHandle(pipe->pipeline).c_str(),
locke-lunarg7cc0ead2020-07-17 14:29:16 -06001641 sync_state_->report_data->FormatHandle(descriptor_set->GetSet()).c_str(),
1642 string_VkDescriptorType(descriptor_type), set_binding.first.second, index,
1643 string_UsageTag(hazard).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001644 }
1645 break;
1646 }
1647 // TODO: INLINE_UNIFORM_BLOCK_EXT, ACCELERATION_STRUCTURE_KHR
1648 default:
1649 break;
1650 }
1651 }
1652 }
1653 }
1654 return skip;
1655}
1656
1657void CommandBufferAccessContext::RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint,
1658 const ResourceUsageTag &tag) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001659 const PIPELINE_STATE *pipe = nullptr;
locke-lunarg61870c22020-06-09 14:51:50 -06001660 const std::vector<LAST_BOUND_STATE::PER_SET> *per_sets = nullptr;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001661 GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(*cb_state_.get(), pipelineBindPoint, &pipe, &per_sets);
1662 if (!pipe || !per_sets) {
locke-lunarg61870c22020-06-09 14:51:50 -06001663 return;
1664 }
1665
1666 using DescriptorClass = cvdescriptorset::DescriptorClass;
1667 using BufferDescriptor = cvdescriptorset::BufferDescriptor;
1668 using ImageDescriptor = cvdescriptorset::ImageDescriptor;
1669 using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
1670 using TexelDescriptor = cvdescriptorset::TexelDescriptor;
1671
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001672 for (const auto &stage_state : pipe->stage_state) {
1673 if (stage_state.stage_flag == VK_SHADER_STAGE_FRAGMENT_BIT && pipe->graphicsPipelineCI.pRasterizationState &&
1674 pipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable) {
locke-lunarge9f1cdf2020-06-12 12:28:57 -06001675 continue;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001676 }
locke-lunarg61870c22020-06-09 14:51:50 -06001677 for (const auto &set_binding : stage_state.descriptor_uses) {
1678 cvdescriptorset::DescriptorSet *descriptor_set = (*per_sets)[set_binding.first.first].bound_descriptor_set;
1679 cvdescriptorset::DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(),
1680 set_binding.first.second);
1681 const auto descriptor_type = binding_it.GetType();
1682 cvdescriptorset::IndexRange index_range = binding_it.GetGlobalIndexRange();
1683 auto array_idx = 0;
1684
1685 if (binding_it.IsVariableDescriptorCount()) {
1686 index_range.end = index_range.start + descriptor_set->GetVariableDescriptorCount();
1687 }
1688 SyncStageAccessIndex sync_index =
1689 GetSyncStageAccessIndexsByDescriptorSet(descriptor_type, set_binding.second, stage_state.stage_flag);
1690
1691 for (uint32_t i = index_range.start; i < index_range.end; ++i, ++array_idx) {
1692 const auto *descriptor = descriptor_set->GetDescriptorFromGlobalIndex(i);
1693 switch (descriptor->GetClass()) {
1694 case DescriptorClass::ImageSampler:
1695 case DescriptorClass::Image: {
1696 const IMAGE_VIEW_STATE *img_view_state = nullptr;
1697 if (descriptor->GetClass() == DescriptorClass::ImageSampler) {
1698 img_view_state = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageViewState();
1699 } else {
1700 img_view_state = static_cast<const ImageDescriptor *>(descriptor)->GetImageViewState();
1701 }
1702 if (!img_view_state) continue;
1703 const IMAGE_STATE *img_state = img_view_state->image_state.get();
1704 VkExtent3D extent = {};
1705 VkOffset3D offset = {};
1706 if (sync_index == SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ) {
1707 extent = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.extent);
1708 offset = CastTo3D(cb_state_->activeRenderPassBeginInfo.renderArea.offset);
1709 } else {
1710 extent = img_state->createInfo.extent;
1711 }
1712 current_context_->UpdateAccessState(*img_state, sync_index, img_view_state->normalized_subresource_range,
1713 offset, extent, tag);
1714 break;
1715 }
1716 case DescriptorClass::TexelBuffer: {
1717 auto buf_view_state = static_cast<const TexelDescriptor *>(descriptor)->GetBufferViewState();
1718 if (!buf_view_state) continue;
1719 const BUFFER_STATE *buf_state = buf_view_state->buffer_state.get();
John Zulauf3e86bf02020-09-12 10:47:57 -06001720 const ResourceAccessRange range = MakeRange(*buf_view_state);
locke-lunarg61870c22020-06-09 14:51:50 -06001721 current_context_->UpdateAccessState(*buf_state, sync_index, range, tag);
1722 break;
1723 }
1724 case DescriptorClass::GeneralBuffer: {
1725 const auto *buffer_descriptor = static_cast<const BufferDescriptor *>(descriptor);
1726 auto buf_state = buffer_descriptor->GetBufferState();
1727 if (!buf_state) continue;
John Zulauf3e86bf02020-09-12 10:47:57 -06001728 const ResourceAccessRange range =
1729 MakeRange(*buf_state, buffer_descriptor->GetOffset(), buffer_descriptor->GetRange());
locke-lunarg61870c22020-06-09 14:51:50 -06001730 current_context_->UpdateAccessState(*buf_state, sync_index, range, tag);
1731 break;
1732 }
1733 // TODO: INLINE_UNIFORM_BLOCK_EXT, ACCELERATION_STRUCTURE_KHR
1734 default:
1735 break;
1736 }
1737 }
1738 }
1739 }
1740}
1741
1742bool CommandBufferAccessContext::ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const char *func_name) const {
1743 bool skip = false;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001744 const auto *pipe = GetCurrentPipelineFromCommandBuffer(*cb_state_.get(), VK_PIPELINE_BIND_POINT_GRAPHICS);
1745 if (!pipe) {
locke-lunarg61870c22020-06-09 14:51:50 -06001746 return skip;
1747 }
1748
1749 const auto &binding_buffers = cb_state_->current_vertex_buffer_binding_info.vertex_buffer_bindings;
1750 const auto &binding_buffers_size = binding_buffers.size();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001751 const auto &binding_descriptions_size = pipe->vertex_binding_descriptions_.size();
locke-lunarg61870c22020-06-09 14:51:50 -06001752
1753 for (size_t i = 0; i < binding_descriptions_size; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001754 const auto &binding_description = pipe->vertex_binding_descriptions_[i];
locke-lunarg61870c22020-06-09 14:51:50 -06001755 if (binding_description.binding < binding_buffers_size) {
1756 const auto &binding_buffer = binding_buffers[binding_description.binding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07001757 if (binding_buffer.buffer_state == nullptr || binding_buffer.buffer_state->destroyed) continue;
locke-lunarg61870c22020-06-09 14:51:50 -06001758
locke-lunarg1ae57d62020-11-18 10:49:19 -07001759 auto *buf_state = binding_buffer.buffer_state.get();
John Zulauf3e86bf02020-09-12 10:47:57 -06001760 const ResourceAccessRange range = GetBufferRange(binding_buffer.offset, buf_state->createInfo.size, firstVertex,
1761 vertexCount, binding_description.stride);
locke-lunarg61870c22020-06-09 14:51:50 -06001762 auto hazard = current_context_->DetectHazard(*buf_state, SYNC_VERTEX_INPUT_VERTEX_ATTRIBUTE_READ, range);
1763 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001764 skip |= sync_state_->LogError(
John Zulauf59e25072020-07-17 10:55:21 -06001765 buf_state->buffer, string_SyncHazardVUID(hazard.hazard), "%s: Hazard %s for vertex %s in %s. Access info %s.",
locke-lunarg88dbb542020-06-23 22:05:42 -06001766 func_name, string_SyncHazard(hazard.hazard), sync_state_->report_data->FormatHandle(buf_state->buffer).c_str(),
John Zulauf37ceaed2020-07-03 16:18:15 -06001767 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(), string_UsageTag(hazard).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001768 }
1769 }
1770 }
1771 return skip;
1772}
1773
1774void CommandBufferAccessContext::RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, const ResourceUsageTag &tag) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001775 const auto *pipe = GetCurrentPipelineFromCommandBuffer(*cb_state_.get(), VK_PIPELINE_BIND_POINT_GRAPHICS);
1776 if (!pipe) {
locke-lunarg61870c22020-06-09 14:51:50 -06001777 return;
1778 }
1779 const auto &binding_buffers = cb_state_->current_vertex_buffer_binding_info.vertex_buffer_bindings;
1780 const auto &binding_buffers_size = binding_buffers.size();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001781 const auto &binding_descriptions_size = pipe->vertex_binding_descriptions_.size();
locke-lunarg61870c22020-06-09 14:51:50 -06001782
1783 for (size_t i = 0; i < binding_descriptions_size; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001784 const auto &binding_description = pipe->vertex_binding_descriptions_[i];
locke-lunarg61870c22020-06-09 14:51:50 -06001785 if (binding_description.binding < binding_buffers_size) {
1786 const auto &binding_buffer = binding_buffers[binding_description.binding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07001787 if (binding_buffer.buffer_state == nullptr || binding_buffer.buffer_state->destroyed) continue;
locke-lunarg61870c22020-06-09 14:51:50 -06001788
locke-lunarg1ae57d62020-11-18 10:49:19 -07001789 auto *buf_state = binding_buffer.buffer_state.get();
John Zulauf3e86bf02020-09-12 10:47:57 -06001790 const ResourceAccessRange range = GetBufferRange(binding_buffer.offset, buf_state->createInfo.size, firstVertex,
1791 vertexCount, binding_description.stride);
locke-lunarg61870c22020-06-09 14:51:50 -06001792 current_context_->UpdateAccessState(*buf_state, SYNC_VERTEX_INPUT_VERTEX_ATTRIBUTE_READ, range, tag);
1793 }
1794 }
1795}
1796
1797bool CommandBufferAccessContext::ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const char *func_name) const {
1798 bool skip = false;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001799 if (cb_state_->index_buffer_binding.buffer_state == nullptr || cb_state_->index_buffer_binding.buffer_state->destroyed) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07001800 return skip;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001801 }
locke-lunarg61870c22020-06-09 14:51:50 -06001802
locke-lunarg1ae57d62020-11-18 10:49:19 -07001803 auto *index_buf_state = cb_state_->index_buffer_binding.buffer_state.get();
locke-lunarg61870c22020-06-09 14:51:50 -06001804 const auto index_size = GetIndexAlignment(cb_state_->index_buffer_binding.index_type);
John Zulauf3e86bf02020-09-12 10:47:57 -06001805 const ResourceAccessRange range = GetBufferRange(cb_state_->index_buffer_binding.offset, index_buf_state->createInfo.size,
1806 firstIndex, indexCount, index_size);
locke-lunarg61870c22020-06-09 14:51:50 -06001807 auto hazard = current_context_->DetectHazard(*index_buf_state, SYNC_VERTEX_INPUT_INDEX_READ, range);
1808 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001809 skip |= sync_state_->LogError(
John Zulauf59e25072020-07-17 10:55:21 -06001810 index_buf_state->buffer, string_SyncHazardVUID(hazard.hazard), "%s: Hazard %s for index %s in %s. Access info %s.",
locke-lunarg88dbb542020-06-23 22:05:42 -06001811 func_name, string_SyncHazard(hazard.hazard), sync_state_->report_data->FormatHandle(index_buf_state->buffer).c_str(),
John Zulauf37ceaed2020-07-03 16:18:15 -06001812 sync_state_->report_data->FormatHandle(cb_state_->commandBuffer).c_str(), string_UsageTag(hazard).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001813 }
1814
1815 // TODO: For now, we detect against the whole vertex buffer, since the index buffer contents can change up until queue submit.
1816 // We will detect a more accurate range in the future.
1817 skip |= ValidateDrawVertex(UINT32_MAX, 0, func_name);
1818 return skip;
1819}
1820
1821void CommandBufferAccessContext::RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, const ResourceUsageTag &tag) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07001822 if (cb_state_->index_buffer_binding.buffer_state == nullptr || cb_state_->index_buffer_binding.buffer_state->destroyed) return;
locke-lunarg61870c22020-06-09 14:51:50 -06001823
locke-lunarg1ae57d62020-11-18 10:49:19 -07001824 auto *index_buf_state = cb_state_->index_buffer_binding.buffer_state.get();
locke-lunarg61870c22020-06-09 14:51:50 -06001825 const auto index_size = GetIndexAlignment(cb_state_->index_buffer_binding.index_type);
John Zulauf3e86bf02020-09-12 10:47:57 -06001826 const ResourceAccessRange range = GetBufferRange(cb_state_->index_buffer_binding.offset, index_buf_state->createInfo.size,
1827 firstIndex, indexCount, index_size);
locke-lunarg61870c22020-06-09 14:51:50 -06001828 current_context_->UpdateAccessState(*index_buf_state, SYNC_VERTEX_INPUT_INDEX_READ, range, tag);
1829
1830 // TODO: For now, we detect against the whole vertex buffer, since the index buffer contents can change up until queue submit.
1831 // We will detect a more accurate range in the future.
1832 RecordDrawVertex(UINT32_MAX, 0, tag);
1833}
1834
1835bool CommandBufferAccessContext::ValidateDrawSubpassAttachment(const char *func_name) const {
locke-lunarg7077d502020-06-18 21:37:26 -06001836 bool skip = false;
1837 if (!current_renderpass_context_) return skip;
1838 skip |= current_renderpass_context_->ValidateDrawSubpassAttachment(*sync_state_, *cb_state_.get(),
1839 cb_state_->activeRenderPassBeginInfo.renderArea, func_name);
1840 return skip;
locke-lunarg61870c22020-06-09 14:51:50 -06001841}
1842
1843void CommandBufferAccessContext::RecordDrawSubpassAttachment(const ResourceUsageTag &tag) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001844 if (current_renderpass_context_) {
locke-lunarg7077d502020-06-18 21:37:26 -06001845 current_renderpass_context_->RecordDrawSubpassAttachment(*cb_state_.get(), cb_state_->activeRenderPassBeginInfo.renderArea,
1846 tag);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001847 }
locke-lunarg61870c22020-06-09 14:51:50 -06001848}
1849
John Zulauf355e49b2020-04-24 15:11:15 -06001850bool CommandBufferAccessContext::ValidateNextSubpass(const char *func_name) const {
John Zulauf355e49b2020-04-24 15:11:15 -06001851 bool skip = false;
locke-lunarg7077d502020-06-18 21:37:26 -06001852 if (!current_renderpass_context_) return skip;
John Zulauf1507ee42020-05-18 11:33:09 -06001853 skip |=
1854 current_renderpass_context_->ValidateNextSubpass(*sync_state_, cb_state_->activeRenderPassBeginInfo.renderArea, func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06001855
1856 return skip;
1857}
1858
1859bool CommandBufferAccessContext::ValidateEndRenderpass(const char *func_name) const {
1860 // TODO: Things to add here.
John Zulauf7635de32020-05-29 17:14:15 -06001861 // Validate Preserve attachments
John Zulauf355e49b2020-04-24 15:11:15 -06001862 bool skip = false;
locke-lunarg7077d502020-06-18 21:37:26 -06001863 if (!current_renderpass_context_) return skip;
John Zulauf7635de32020-05-29 17:14:15 -06001864 skip |= current_renderpass_context_->ValidateEndRenderPass(*sync_state_, cb_state_->activeRenderPassBeginInfo.renderArea,
1865 func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06001866
1867 return skip;
1868}
1869
1870void CommandBufferAccessContext::RecordBeginRenderPass(const ResourceUsageTag &tag) {
1871 assert(sync_state_);
1872 if (!cb_state_) return;
1873
1874 // Create an access context for the current renderpass.
John Zulauf1a224292020-06-30 14:52:13 -06001875 render_pass_contexts_.emplace_back();
John Zulauf16adfc92020-04-08 10:28:33 -06001876 current_renderpass_context_ = &render_pass_contexts_.back();
John Zulauf1a224292020-06-30 14:52:13 -06001877 current_renderpass_context_->RecordBeginRenderPass(*sync_state_, *cb_state_, &cb_access_context_, queue_flags_, tag);
John Zulauf16adfc92020-04-08 10:28:33 -06001878 current_context_ = &current_renderpass_context_->CurrentContext();
John Zulauf16adfc92020-04-08 10:28:33 -06001879}
1880
John Zulauf355e49b2020-04-24 15:11:15 -06001881void CommandBufferAccessContext::RecordNextSubpass(const RENDER_PASS_STATE &rp_state, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001882 assert(current_renderpass_context_);
John Zulauf1507ee42020-05-18 11:33:09 -06001883 current_renderpass_context_->RecordNextSubpass(cb_state_->activeRenderPassBeginInfo.renderArea, tag);
John Zulauf16adfc92020-04-08 10:28:33 -06001884 current_context_ = &current_renderpass_context_->CurrentContext();
1885}
1886
John Zulauf355e49b2020-04-24 15:11:15 -06001887void CommandBufferAccessContext::RecordEndRenderPass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag) {
John Zulauf16adfc92020-04-08 10:28:33 -06001888 assert(current_renderpass_context_);
1889 if (!current_renderpass_context_) return;
1890
John Zulauf1a224292020-06-30 14:52:13 -06001891 current_renderpass_context_->RecordEndRenderPass(&cb_access_context_, cb_state_->activeRenderPassBeginInfo.renderArea, tag);
John Zulauf355e49b2020-04-24 15:11:15 -06001892 current_context_ = &cb_access_context_;
John Zulauf16adfc92020-04-08 10:28:33 -06001893 current_renderpass_context_ = nullptr;
1894}
1895
John Zulauf49beb112020-11-04 16:06:31 -07001896bool CommandBufferAccessContext::ValidateSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
1897 VkPipelineStageFlags stageMask) const {
1898 return false;
1899}
1900
1901void CommandBufferAccessContext::RecordSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {}
1902
1903bool CommandBufferAccessContext::ValidateResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
1904 VkPipelineStageFlags stageMask) const {
1905 return false;
1906}
1907
1908void CommandBufferAccessContext::RecordResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {}
1909
1910bool CommandBufferAccessContext::ValidateWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
1911 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
1912 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
1913 uint32_t bufferMemoryBarrierCount,
1914 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
1915 uint32_t imageMemoryBarrierCount,
1916 const VkImageMemoryBarrier *pImageMemoryBarriers) const {
1917 return false;
1918}
1919
1920void CommandBufferAccessContext::RecordWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
1921 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
1922 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
1923 uint32_t bufferMemoryBarrierCount,
1924 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
1925 uint32_t imageMemoryBarrierCount,
1926 const VkImageMemoryBarrier *pImageMemoryBarriers) const {}
1927
locke-lunarg61870c22020-06-09 14:51:50 -06001928bool RenderPassAccessContext::ValidateDrawSubpassAttachment(const SyncValidator &sync_state, const CMD_BUFFER_STATE &cmd,
1929 const VkRect2D &render_area, const char *func_name) const {
1930 bool skip = false;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001931 const auto *pipe = GetCurrentPipelineFromCommandBuffer(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS);
1932 if (!pipe ||
1933 (pipe->graphicsPipelineCI.pRasterizationState && pipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable)) {
locke-lunarg96dc9632020-06-10 17:22:18 -06001934 return skip;
1935 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001936 const auto &list = pipe->fragmentShader_writable_output_location_list;
locke-lunarg61870c22020-06-09 14:51:50 -06001937 const auto &subpass = rp_state_->createInfo.pSubpasses[current_subpass_];
1938 VkExtent3D extent = CastTo3D(render_area.extent);
1939 VkOffset3D offset = CastTo3D(render_area.offset);
locke-lunarg37047832020-06-12 13:44:45 -06001940
John Zulauf1a224292020-06-30 14:52:13 -06001941 const auto &current_context = CurrentContext();
locke-lunarg44f9bb12020-06-10 14:43:57 -06001942    // The subpass's input attachments have already been validated in ValidateDispatchDrawDescriptorSet
locke-lunarg96dc9632020-06-10 17:22:18 -06001943 if (subpass.pColorAttachments && subpass.colorAttachmentCount && !list.empty()) {
1944 for (const auto location : list) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001945 if (location >= subpass.colorAttachmentCount ||
1946 subpass.pColorAttachments[location].attachment == VK_ATTACHMENT_UNUSED) {
locke-lunarg96dc9632020-06-10 17:22:18 -06001947 continue;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001948 }
locke-lunarg96dc9632020-06-10 17:22:18 -06001949 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pColorAttachments[location].attachment];
John Zulauf1a224292020-06-30 14:52:13 -06001950 HazardResult hazard = current_context.DetectHazard(img_view_state, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE,
1951 kColorAttachmentRasterOrder, offset, extent);
locke-lunarg96dc9632020-06-10 17:22:18 -06001952 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001953 skip |= sync_state.LogError(img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06001954 "%s: Hazard %s for %s in %s, Subpass #%d, and pColorAttachments #%d. Access info %s.",
locke-lunarg88dbb542020-06-23 22:05:42 -06001955 func_name, string_SyncHazard(hazard.hazard),
1956 sync_state.report_data->FormatHandle(img_view_state->image_view).c_str(),
1957 sync_state.report_data->FormatHandle(cmd.commandBuffer).c_str(), cmd.activeSubpass,
John Zulauf37ceaed2020-07-03 16:18:15 -06001958 location, string_UsageTag(hazard).c_str());
locke-lunarg61870c22020-06-09 14:51:50 -06001959 }
1960 }
1961 }
locke-lunarg37047832020-06-12 13:44:45 -06001962
1963    // PHASE1 TODO: Add layout-based read vs. write selection.
1964 // PHASE1 TODO: Read operations for both depth and stencil are possible in the future.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001965 if (pipe->graphicsPipelineCI.pDepthStencilState && subpass.pDepthStencilAttachment &&
locke-lunarg37047832020-06-12 13:44:45 -06001966 subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
locke-lunarg61870c22020-06-09 14:51:50 -06001967 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pDepthStencilAttachment->attachment];
locke-lunarg37047832020-06-12 13:44:45 -06001968 bool depth_write = false, stencil_write = false;
1969
1970        // PHASE1 TODO: These validations should be in core_checks.
1971 if (!FormatIsStencilOnly(img_view_state->create_info.format) &&
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001972 pipe->graphicsPipelineCI.pDepthStencilState->depthTestEnable &&
1973 pipe->graphicsPipelineCI.pDepthStencilState->depthWriteEnable &&
locke-lunarg37047832020-06-12 13:44:45 -06001974 IsImageLayoutDepthWritable(subpass.pDepthStencilAttachment->layout)) {
1975 depth_write = true;
1976 }
1977        // PHASE1 TODO: It needs to check whether the stencil aspect is writable.
1978        // If failOp, passOp, or depthFailOp are not KEEP, and writeMask isn't 0, it's writable.
1979        // If the depth test is disabled, it is treated as passing, so depthFailOp doesn't run.
1980        // PHASE1 TODO: These validations should be in core_checks.
1981 if (!FormatIsDepthOnly(img_view_state->create_info.format) &&
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001982 pipe->graphicsPipelineCI.pDepthStencilState->stencilTestEnable &&
locke-lunarg37047832020-06-12 13:44:45 -06001983 IsImageLayoutStencilWritable(subpass.pDepthStencilAttachment->layout)) {
1984 stencil_write = true;
1985 }
1986
1987 // PHASE1 TODO: Add EARLY stage detection based on ExecutionMode.
1988 if (depth_write) {
1989 HazardResult hazard =
John Zulauf1a224292020-06-30 14:52:13 -06001990 current_context.DetectHazard(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE,
1991 kDepthStencilAttachmentRasterOrder, offset, extent, VK_IMAGE_ASPECT_DEPTH_BIT);
locke-lunarg37047832020-06-12 13:44:45 -06001992 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06001993 skip |= sync_state.LogError(
1994 img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06001995 "%s: Hazard %s for %s in %s, Subpass #%d, and depth part of pDepthStencilAttachment. Access info %s.",
locke-lunarg88dbb542020-06-23 22:05:42 -06001996 func_name, string_SyncHazard(hazard.hazard),
1997 sync_state.report_data->FormatHandle(img_view_state->image_view).c_str(),
1998 sync_state.report_data->FormatHandle(cmd.commandBuffer).c_str(), cmd.activeSubpass,
John Zulauf37ceaed2020-07-03 16:18:15 -06001999 string_UsageTag(hazard).c_str());
locke-lunarg37047832020-06-12 13:44:45 -06002000 }
2001 }
2002 if (stencil_write) {
2003 HazardResult hazard =
John Zulauf1a224292020-06-30 14:52:13 -06002004 current_context.DetectHazard(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE,
2005 kDepthStencilAttachmentRasterOrder, offset, extent, VK_IMAGE_ASPECT_STENCIL_BIT);
locke-lunarg37047832020-06-12 13:44:45 -06002006 if (hazard.hazard) {
locke-lunarg88dbb542020-06-23 22:05:42 -06002007 skip |= sync_state.LogError(
2008 img_view_state->image_view, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06002009 "%s: Hazard %s for %s in %s, Subpass #%d, and stencil part of pDepthStencilAttachment. Access info %s.",
locke-lunarg88dbb542020-06-23 22:05:42 -06002010 func_name, string_SyncHazard(hazard.hazard),
2011 sync_state.report_data->FormatHandle(img_view_state->image_view).c_str(),
2012 sync_state.report_data->FormatHandle(cmd.commandBuffer).c_str(), cmd.activeSubpass,
John Zulauf37ceaed2020-07-03 16:18:15 -06002013 string_UsageTag(hazard).c_str());
locke-lunarg37047832020-06-12 13:44:45 -06002014 }
locke-lunarg61870c22020-06-09 14:51:50 -06002015 }
2016 }
2017 return skip;
2018}
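// Illustrative sketch, not validator code: a minimal pipeline depth/stencil state that makes the
// depth_write path above fire. The structure and enums are standard Vulkan; the surrounding
// pipeline/render pass setup (attachment format, layout, draw call) is assumed rather than shown.
#if 0
VkPipelineDepthStencilStateCreateInfo ds = {};
ds.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
ds.depthTestEnable = VK_TRUE;   // test enabled...
ds.depthWriteEnable = VK_TRUE;  // ...and writes enabled
// With a depth-capable attachment format and a writable layout (e.g.
// VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL), a draw using this pipeline is treated as
// SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE on the VK_IMAGE_ASPECT_DEPTH_BIT aspect.
#endif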
2019
locke-lunarg96dc9632020-06-10 17:22:18 -06002020void RenderPassAccessContext::RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd, const VkRect2D &render_area,
2021 const ResourceUsageTag &tag) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002022 const auto *pipe = GetCurrentPipelineFromCommandBuffer(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS);
2023 if (!pipe ||
2024 (pipe->graphicsPipelineCI.pRasterizationState && pipe->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable)) {
locke-lunarg96dc9632020-06-10 17:22:18 -06002025 return;
2026 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002027 const auto &list = pipe->fragmentShader_writable_output_location_list;
locke-lunarg61870c22020-06-09 14:51:50 -06002028 const auto &subpass = rp_state_->createInfo.pSubpasses[current_subpass_];
2029 VkExtent3D extent = CastTo3D(render_area.extent);
2030 VkOffset3D offset = CastTo3D(render_area.offset);
2031
John Zulauf1a224292020-06-30 14:52:13 -06002032 auto &current_context = CurrentContext();
locke-lunarg44f9bb12020-06-10 14:43:57 -06002033    // The subpass's input attachments have already been recorded in RecordDispatchDrawDescriptorSet
locke-lunarg96dc9632020-06-10 17:22:18 -06002034 if (subpass.pColorAttachments && subpass.colorAttachmentCount && !list.empty()) {
2035 for (const auto location : list) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002036 if (location >= subpass.colorAttachmentCount ||
2037 subpass.pColorAttachments[location].attachment == VK_ATTACHMENT_UNUSED) {
locke-lunarg96dc9632020-06-10 17:22:18 -06002038 continue;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002039 }
locke-lunarg96dc9632020-06-10 17:22:18 -06002040 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pColorAttachments[location].attachment];
John Zulauf1a224292020-06-30 14:52:13 -06002041 current_context.UpdateAccessState(img_view_state, SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, offset, extent,
2042 0, tag);
locke-lunarg61870c22020-06-09 14:51:50 -06002043 }
2044 }
locke-lunarg37047832020-06-12 13:44:45 -06002045
2046    // PHASE1 TODO: Add layout-based read vs. write selection.
2047 // PHASE1 TODO: Read operations for both depth and stencil are possible in the future.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002048 if (pipe->graphicsPipelineCI.pDepthStencilState && subpass.pDepthStencilAttachment &&
locke-lunarg37047832020-06-12 13:44:45 -06002049 subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
locke-lunarg61870c22020-06-09 14:51:50 -06002050 const IMAGE_VIEW_STATE *img_view_state = attachment_views_[subpass.pDepthStencilAttachment->attachment];
locke-lunarg37047832020-06-12 13:44:45 -06002051 bool depth_write = false, stencil_write = false;
2052
2053        // PHASE1 TODO: These validations should be in core_checks.
2054 if (!FormatIsStencilOnly(img_view_state->create_info.format) &&
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002055 pipe->graphicsPipelineCI.pDepthStencilState->depthTestEnable &&
2056 pipe->graphicsPipelineCI.pDepthStencilState->depthWriteEnable &&
locke-lunarg37047832020-06-12 13:44:45 -06002057 IsImageLayoutDepthWritable(subpass.pDepthStencilAttachment->layout)) {
2058 depth_write = true;
2059 }
2060        // PHASE1 TODO: It needs to check whether the stencil aspect is writable.
2061        // If failOp, passOp, or depthFailOp are not KEEP, and writeMask isn't 0, it's writable.
2062        // If the depth test is disabled, it is treated as passing, so depthFailOp doesn't run.
2063        // PHASE1 TODO: These validations should be in core_checks.
2064 if (!FormatIsDepthOnly(img_view_state->create_info.format) &&
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002065 pipe->graphicsPipelineCI.pDepthStencilState->stencilTestEnable &&
locke-lunarg37047832020-06-12 13:44:45 -06002066 IsImageLayoutStencilWritable(subpass.pDepthStencilAttachment->layout)) {
2067 stencil_write = true;
2068 }
2069
2070 // PHASE1 TODO: Add EARLY stage detection based on ExecutionMode.
2071 if (depth_write) {
John Zulauf1a224292020-06-30 14:52:13 -06002072 current_context.UpdateAccessState(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, offset,
2073 extent, VK_IMAGE_ASPECT_DEPTH_BIT, tag);
locke-lunarg37047832020-06-12 13:44:45 -06002074 }
2075 if (stencil_write) {
John Zulauf1a224292020-06-30 14:52:13 -06002076 current_context.UpdateAccessState(img_view_state, SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, offset,
2077 extent, VK_IMAGE_ASPECT_STENCIL_BIT, tag);
locke-lunarg37047832020-06-12 13:44:45 -06002078 }
locke-lunarg61870c22020-06-09 14:51:50 -06002079 }
2080}
2081
John Zulauf1507ee42020-05-18 11:33:09 -06002082bool RenderPassAccessContext::ValidateNextSubpass(const SyncValidator &sync_state, const VkRect2D &render_area,
2083 const char *func_name) const {
John Zulaufaff20662020-06-01 14:07:58 -06002084 // PHASE1 TODO: Add Validate Preserve attachments
John Zulauf355e49b2020-04-24 15:11:15 -06002085 bool skip = false;
John Zulaufb027cdb2020-05-21 14:25:22 -06002086 skip |= CurrentContext().ValidateResolveOperations(sync_state, *rp_state_, render_area, attachment_views_, func_name,
2087 current_subpass_);
John Zulaufaff20662020-06-01 14:07:58 -06002088 skip |= CurrentContext().ValidateStoreOperation(sync_state, *rp_state_, render_area, current_subpass_, attachment_views_,
2089 func_name);
2090
John Zulauf355e49b2020-04-24 15:11:15 -06002091 const auto next_subpass = current_subpass_ + 1;
John Zulauf1507ee42020-05-18 11:33:09 -06002092 const auto &next_context = subpass_contexts_[next_subpass];
John Zulauf7635de32020-05-29 17:14:15 -06002093 skip |= next_context.ValidateLayoutTransitions(sync_state, *rp_state_, render_area, next_subpass, attachment_views_, func_name);
John Zulaufb02c1eb2020-10-06 16:33:36 -06002094 if (!skip) {
2095        // To avoid complex (and buggy) duplication of the effect of layout transitions on load operations, we'll record them
2096 // on a copy of the (empty) next context.
2097 // Note: The resource access map should be empty so hopefully this copy isn't too horrible from a perf POV.
2098 AccessContext temp_context(next_context);
2099 temp_context.RecordLayoutTransitions(*rp_state_, next_subpass, attachment_views_, kCurrentCommandTag);
2100 skip |= temp_context.ValidateLoadOperation(sync_state, *rp_state_, render_area, next_subpass, attachment_views_, func_name);
2101 }
John Zulauf7635de32020-05-29 17:14:15 -06002102 return skip;
2103}
2104bool RenderPassAccessContext::ValidateEndRenderPass(const SyncValidator &sync_state, const VkRect2D &render_area,
2105 const char *func_name) const {
John Zulaufaff20662020-06-01 14:07:58 -06002106 // PHASE1 TODO: Validate Preserve
John Zulauf7635de32020-05-29 17:14:15 -06002107 bool skip = false;
2108 skip |= CurrentContext().ValidateResolveOperations(sync_state, *rp_state_, render_area, attachment_views_, func_name,
2109 current_subpass_);
John Zulaufaff20662020-06-01 14:07:58 -06002110 skip |= CurrentContext().ValidateStoreOperation(sync_state, *rp_state_, render_area, current_subpass_, attachment_views_,
2111 func_name);
John Zulauf7635de32020-05-29 17:14:15 -06002112 skip |= ValidateFinalSubpassLayoutTransitions(sync_state, render_area, func_name);
John Zulauf355e49b2020-04-24 15:11:15 -06002113 return skip;
2114}
2115
John Zulauf7635de32020-05-29 17:14:15 -06002116AccessContext *RenderPassAccessContext::CreateStoreResolveProxy(const VkRect2D &render_area) const {
2117 return CreateStoreResolveProxyContext(CurrentContext(), *rp_state_, current_subpass_, render_area, attachment_views_);
2118}
2119
2120bool RenderPassAccessContext::ValidateFinalSubpassLayoutTransitions(const SyncValidator &sync_state, const VkRect2D &render_area,
2121 const char *func_name) const {
John Zulauf355e49b2020-04-24 15:11:15 -06002122 bool skip = false;
2123
John Zulauf7635de32020-05-29 17:14:15 -06002124    // As validation methods are const and precede the record/update phase, for any transitions from the current (last)
2125 // subpass, we have to validate them against a copy of the current AccessContext, with resolve operations applied.
2126 // Note: we could be more efficient by tracking whether or not we actually *have* any changes (e.g. attachment resolve)
2127 // to apply and only copy then, if this proves a hot spot.
2128 std::unique_ptr<AccessContext> proxy_for_current;
2129
John Zulauf355e49b2020-04-24 15:11:15 -06002130 // Validate the "finalLayout" transitions to external
2131    // Get them from where they're hiding in the extra entry.
2132 const auto &final_transitions = rp_state_->subpass_transitions.back();
2133 for (const auto &transition : final_transitions) {
2134 const auto &attach_view = attachment_views_[transition.attachment];
2135 const auto &trackback = subpass_contexts_[transition.prev_pass].GetDstExternalTrackBack();
2136 assert(trackback.context); // Transitions are given implicit transitions if the StateTracker is working correctly
John Zulauf7635de32020-05-29 17:14:15 -06002137 auto *context = trackback.context;
2138
2139 if (transition.prev_pass == current_subpass_) {
2140 if (!proxy_for_current) {
2141            // We haven't recorded resolve operations for the current_subpass, so we need to copy current and update it *as if*
2142 proxy_for_current.reset(CreateStoreResolveProxy(render_area));
2143 }
2144 context = proxy_for_current.get();
2145 }
2146
John Zulaufa0a98292020-09-18 09:30:10 -06002147        // Use the merged barrier for the hazard check (safe since it just considers the src (first) scope).
2148 const auto merged_barrier = MergeBarriers(trackback.barriers);
2149 auto hazard = context->DetectImageBarrierHazard(*attach_view->image_state, merged_barrier.src_exec_scope,
2150 merged_barrier.src_access_scope, attach_view->normalized_subresource_range,
2151 AccessContext::DetectOptions::kDetectPrevious);
John Zulauf355e49b2020-04-24 15:11:15 -06002152 if (hazard.hazard) {
2153 skip |= sync_state.LogError(rp_state_->renderPass, string_SyncHazardVUID(hazard.hazard),
2154 "%s: Hazard %s with last use subpass %" PRIu32 " for attachment %" PRIu32
John Zulauf389c34b2020-07-28 11:19:35 -06002155 " final image layout transition (old_layout: %s, new_layout: %s). Access info %s.",
John Zulauf1dae9192020-06-16 15:46:44 -06002156 func_name, string_SyncHazard(hazard.hazard), transition.prev_pass, transition.attachment,
John Zulauf389c34b2020-07-28 11:19:35 -06002157 string_VkImageLayout(transition.old_layout), string_VkImageLayout(transition.new_layout),
John Zulauf37ceaed2020-07-03 16:18:15 -06002158 string_UsageTag(hazard).c_str());
John Zulauf355e49b2020-04-24 15:11:15 -06002159 }
2160 }
2161 return skip;
2162}
2163
2164void RenderPassAccessContext::RecordLayoutTransitions(const ResourceUsageTag &tag) {
2165 // Add layout transitions...
John Zulaufb02c1eb2020-10-06 16:33:36 -06002166 subpass_contexts_[current_subpass_].RecordLayoutTransitions(*rp_state_, current_subpass_, attachment_views_, tag);
John Zulauf355e49b2020-04-24 15:11:15 -06002167}
2168
John Zulauf1507ee42020-05-18 11:33:09 -06002169void RenderPassAccessContext::RecordLoadOperations(const VkRect2D &render_area, const ResourceUsageTag &tag) {
2170 const auto *attachment_ci = rp_state_->createInfo.pAttachments;
2171 auto &subpass_context = subpass_contexts_[current_subpass_];
2172 VkExtent3D extent = CastTo3D(render_area.extent);
2173 VkOffset3D offset = CastTo3D(render_area.offset);
2174
2175 for (uint32_t i = 0; i < rp_state_->createInfo.attachmentCount; i++) {
2176 if (rp_state_->attachment_first_subpass[i] == current_subpass_) {
2177 if (attachment_views_[i] == nullptr) continue; // UNUSED
2178 const auto &view = *attachment_views_[i];
2179 const IMAGE_STATE *image = view.image_state.get();
2180 if (image == nullptr) continue;
2181
2182 const auto &ci = attachment_ci[i];
2183 const bool has_depth = FormatHasDepth(ci.format);
John Zulaufb027cdb2020-05-21 14:25:22 -06002184 const bool has_stencil = FormatHasStencil(ci.format);
John Zulauf1507ee42020-05-18 11:33:09 -06002185 const bool is_color = !(has_depth || has_stencil);
2186
2187 if (is_color) {
2188 subpass_context.UpdateAccessState(*image, ColorLoadUsage(ci.loadOp), view.normalized_subresource_range, offset,
2189 extent, tag);
2190 } else {
2191 auto update_range = view.normalized_subresource_range;
2192 if (has_depth) {
2193 update_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
2194 subpass_context.UpdateAccessState(*image, DepthStencilLoadUsage(ci.loadOp), update_range, offset, extent, tag);
2195 }
2196 if (has_stencil) {
2197 update_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
2198 subpass_context.UpdateAccessState(*image, DepthStencilLoadUsage(ci.stencilLoadOp), update_range, offset, extent,
2199 tag);
2200 }
2201 }
2202 }
2203 }
2204}
2205
John Zulauf355e49b2020-04-24 15:11:15 -06002206void RenderPassAccessContext::RecordBeginRenderPass(const SyncValidator &state, const CMD_BUFFER_STATE &cb_state,
John Zulauf1a224292020-06-30 14:52:13 -06002207 const AccessContext *external_context, VkQueueFlags queue_flags,
2208 const ResourceUsageTag &tag) {
John Zulauf355e49b2020-04-24 15:11:15 -06002209 current_subpass_ = 0;
locke-lunargaecf2152020-05-12 17:15:41 -06002210 rp_state_ = cb_state.activeRenderPass.get();
John Zulauf355e49b2020-04-24 15:11:15 -06002211 subpass_contexts_.reserve(rp_state_->createInfo.subpassCount);
2212 // Add this for all subpasses here so that they exsist during next subpass validation
2213 for (uint32_t pass = 0; pass < rp_state_->createInfo.subpassCount; pass++) {
John Zulauf1a224292020-06-30 14:52:13 -06002214 subpass_contexts_.emplace_back(pass, queue_flags, rp_state_->subpass_dependencies, subpass_contexts_, external_context);
John Zulauf355e49b2020-04-24 15:11:15 -06002215 }
2216 attachment_views_ = state.GetCurrentAttachmentViews(cb_state);
2217
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -07002218 subpass_contexts_[current_subpass_].SetStartTag(tag);
John Zulauf355e49b2020-04-24 15:11:15 -06002219 RecordLayoutTransitions(tag);
John Zulauf1507ee42020-05-18 11:33:09 -06002220 RecordLoadOperations(cb_state.activeRenderPassBeginInfo.renderArea, tag);
John Zulauf355e49b2020-04-24 15:11:15 -06002221}
John Zulauf1507ee42020-05-18 11:33:09 -06002222
2223void RenderPassAccessContext::RecordNextSubpass(const VkRect2D &render_area, const ResourceUsageTag &tag) {
John Zulauf7635de32020-05-29 17:14:15 -06002224 // Resolves are against *prior* subpass context and thus *before* the subpass increment
2225 CurrentContext().UpdateAttachmentResolveAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulaufaff20662020-06-01 14:07:58 -06002226 CurrentContext().UpdateAttachmentStoreAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulauf7635de32020-05-29 17:14:15 -06002227
Jeremy Gebben6ea9d9e2020-12-11 09:41:01 -07002228 // Move to the next sub-command for the new subpass. The resolve and store are logically part of the previous
2229 // subpass, so their tag needs to be different from the layout and load operations below.
2230 ResourceUsageTag next_tag = tag;
2231 next_tag.index++;
John Zulauf355e49b2020-04-24 15:11:15 -06002232 current_subpass_++;
2233 assert(current_subpass_ < subpass_contexts_.size());
Jeremy Gebben6ea9d9e2020-12-11 09:41:01 -07002234 subpass_contexts_[current_subpass_].SetStartTag(next_tag);
2235 RecordLayoutTransitions(next_tag);
2236 RecordLoadOperations(render_area, next_tag);
John Zulauf355e49b2020-04-24 15:11:15 -06002237}
2238
John Zulauf1a224292020-06-30 14:52:13 -06002239void RenderPassAccessContext::RecordEndRenderPass(AccessContext *external_context, const VkRect2D &render_area,
2240 const ResourceUsageTag &tag) {
John Zulaufaff20662020-06-01 14:07:58 -06002241 // Add the resolve and store accesses
John Zulauf7635de32020-05-29 17:14:15 -06002242 CurrentContext().UpdateAttachmentResolveAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulaufaff20662020-06-01 14:07:58 -06002243 CurrentContext().UpdateAttachmentStoreAccess(*rp_state_, render_area, attachment_views_, current_subpass_, tag);
John Zulauf7635de32020-05-29 17:14:15 -06002244
John Zulauf355e49b2020-04-24 15:11:15 -06002245 // Export the accesses from the renderpass...
John Zulauf1a224292020-06-30 14:52:13 -06002246 external_context->ResolveChildContexts(subpass_contexts_);
John Zulauf355e49b2020-04-24 15:11:15 -06002247
2248 // Add the "finalLayout" transitions to external
2249    // Get them from where they're hiding in the extra entry.
John Zulauf89311b42020-09-29 16:28:47 -06002250    // Note that since *final* always comes from *one* subpass per view, we don't have to accumulate the barriers
2251    // TODO Aliasing: we may need to reconsider barrier accumulation... though I don't know that it would be valid for aliasing
2252    // that had multiple final layout transitions from multiple final subpasses.
John Zulauf355e49b2020-04-24 15:11:15 -06002253 const auto &final_transitions = rp_state_->subpass_transitions.back();
2254 for (const auto &transition : final_transitions) {
2255 const auto &attachment = attachment_views_[transition.attachment];
2256 const auto &last_trackback = subpass_contexts_[transition.prev_pass].GetDstExternalTrackBack();
John Zulaufaa97d8b2020-07-14 10:58:13 -06002257 assert(&subpass_contexts_[transition.prev_pass] == last_trackback.context);
John Zulauf89311b42020-09-29 16:28:47 -06002258 ApplyBarrierOpsFunctor barrier_ops(true /* resolve */, last_trackback.barriers, true /* layout transition */, tag);
2259 external_context->UpdateResourceAccess(*attachment->image_state, attachment->normalized_subresource_range, barrier_ops);
John Zulauf355e49b2020-04-24 15:11:15 -06002260 }
2261}
2262
John Zulauf3d84f1b2020-03-09 13:33:25 -06002263SyncBarrier::SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &barrier) {
2264 const auto src_stage_mask = ExpandPipelineStages(queue_flags, barrier.srcStageMask);
2265 src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
2266 src_access_scope = SyncStageAccess::AccessScope(src_stage_mask, barrier.srcAccessMask);
2267 const auto dst_stage_mask = ExpandPipelineStages(queue_flags, barrier.dstStageMask);
2268 dst_exec_scope = WithLaterPipelineStages(dst_stage_mask);
2269 dst_access_scope = SyncStageAccess::AccessScope(dst_stage_mask, barrier.dstAccessMask);
2270}
2271
John Zulaufb02c1eb2020-10-06 16:33:36 -06002272// Apply a list of barriers, without resolving pending state, useful for subpass layout transitions
2273void ResourceAccessState::ApplyBarriers(const std::vector<SyncBarrier> &barriers, bool layout_transition) {
2274 for (const auto &barrier : barriers) {
2275 ApplyBarrier(barrier, layout_transition);
2276 }
2277}
2278
John Zulauf89311b42020-09-29 16:28:47 -06002279 // ApplyBarriers is designed for *fully* inclusive barrier lists without layout transitions. Its designed use is for
2280 // inter-subpass barriers for lazy-evaluation of parent context memory ranges. Subpass layout transitions are *not* done
2281// lazily, s.t. no previous access reports should need layout transitions.
John Zulaufb02c1eb2020-10-06 16:33:36 -06002282void ResourceAccessState::ApplyBarriers(const std::vector<SyncBarrier> &barriers, const ResourceUsageTag &tag) {
2283 assert(!pending_layout_transition); // This should never be call in the middle of another barrier application
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002284 assert(pending_write_barriers.none());
John Zulaufb02c1eb2020-10-06 16:33:36 -06002285 assert(!pending_write_dep_chain);
John Zulaufa0a98292020-09-18 09:30:10 -06002286 for (const auto &barrier : barriers) {
John Zulauf89311b42020-09-29 16:28:47 -06002287 ApplyBarrier(barrier, false);
John Zulaufa0a98292020-09-18 09:30:10 -06002288 }
John Zulaufb02c1eb2020-10-06 16:33:36 -06002289 ApplyPendingBarriers(tag);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002290}
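// Illustrative sketch, not validator code: the two-phase pattern used above -- accumulate each
// barrier's effects with ApplyBarrier(), then commit them once with ApplyPendingBarriers() -- keeps
// the barriers of one batch independent of each other. Names like 'access', 'subpass_barriers',
// and 'tag' are hypothetical stand-ins for the caller's state.
#if 0
ResourceAccessState access;
for (const SyncBarrier &barrier : subpass_barriers) {
    access.ApplyBarrier(barrier, false /* layout_transition */);  // only fills the pending_* members
}
access.ApplyPendingBarriers(tag);  // chains and write barriers are updated exactly once, here
#endif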
John Zulauf9cb530d2019-09-30 14:14:10 -06002291HazardResult ResourceAccessState::DetectHazard(SyncStageAccessIndex usage_index) const {
2292 HazardResult hazard;
2293 auto usage = FlagBit(usage_index);
John Zulauf361fb532020-07-22 10:45:39 -06002294 const auto usage_stage = PipelineStageBit(usage_index);
John Zulauf9cb530d2019-09-30 14:14:10 -06002295 if (IsRead(usage)) {
John Zulauf4285ee92020-09-23 10:20:52 -06002296 if (IsRAWHazard(usage_stage, usage)) {
John Zulauf59e25072020-07-17 10:55:21 -06002297 hazard.Set(this, usage_index, READ_AFTER_WRITE, last_write, write_tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002298 }
2299 } else {
John Zulauf361fb532020-07-22 10:45:39 -06002300 // Write operation:
2301        // Check for read operations more recent than last_write (as setting last_write clears reads, that would be *any* reads)
2302        // If reads exist -- test only against them because either:
2303        // * the reads were hazards, and we've reported the hazard, so just test the current write vs. the read operations
2304        // * the reads weren't hazards, and thus if the write is safe w.r.t. the reads, no hazard vs. last_write is possible if
2305        //   the current write happens after the reads, so just test the write against the reads
2306 // Otherwise test against last_write
2307 //
2308 // Look for casus belli for WAR
2309 if (last_read_count) {
2310 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2311 const auto &read_access = last_reads[read_index];
2312 if (IsReadHazard(usage_stage, read_access)) {
2313 hazard.Set(this, usage_index, WRITE_AFTER_READ, read_access.access, read_access.tag);
2314 break;
2315 }
2316 }
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002317 } else if (last_write.any() && IsWriteHazard(usage)) {
John Zulauf361fb532020-07-22 10:45:39 -06002318 // Write-After-Write check -- if we have a previous write to test against
2319 hazard.Set(this, usage_index, WRITE_AFTER_WRITE, last_write, write_tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002320 }
2321 }
2322 return hazard;
2323}
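// Illustrative sketch of assumed application usage, not validator code: the classic case the
// WRITE_AFTER_WRITE branch above reports -- two transfer writes to the same buffer range with no
// intervening barrier. 'cmd', 'src_a', 'src_b', 'dst', and 'region' are hypothetical handles.
#if 0
vkCmdCopyBuffer(cmd, src_a, dst, 1, &region);  // records SYNC_TRANSFER_TRANSFER_WRITE on the dst range
vkCmdCopyBuffer(cmd, src_b, dst, 1, &region);  // second write to the same range: DetectHazard()
                                               // returns WRITE_AFTER_WRITE for the overlap
#endif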
2324
John Zulauf69133422020-05-20 14:55:53 -06002325HazardResult ResourceAccessState::DetectHazard(SyncStageAccessIndex usage_index, const SyncOrderingBarrier &ordering) const {
2326 // The ordering guarantees act as barriers to the last accesses, independent of synchronization operations
2327 HazardResult hazard;
John Zulauf4285ee92020-09-23 10:20:52 -06002328 const auto usage_bit = FlagBit(usage_index);
John Zulauf361fb532020-07-22 10:45:39 -06002329 const auto usage_stage = PipelineStageBit(usage_index);
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002330 const bool input_attachment_ordering = (ordering.access_scope & SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ_BIT).any();
2331 const bool last_write_is_ordered = (last_write & ordering.access_scope).any();
John Zulauf4285ee92020-09-23 10:20:52 -06002332 if (IsRead(usage_bit)) {
2333        // Exclude RAW if there is no write, or if the write is not the most recent operation w.r.t. usage;
2334 bool is_raw_hazard = IsRAWHazard(usage_stage, usage_bit);
2335 if (is_raw_hazard) {
2336 // NOTE: we know last_write is non-zero
2337 // See if the ordering rules save us from the simple RAW check above
2338 // First check to see if the current usage is covered by the ordering rules
2339 const bool usage_is_input_attachment = (usage_index == SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ);
2340 const bool usage_is_ordered =
2341 (input_attachment_ordering && usage_is_input_attachment) || (0 != (usage_stage & ordering.exec_scope));
2342 if (usage_is_ordered) {
2343            // Now see if the most recent write (or a subsequent read) is ordered
2344 const bool most_recent_is_ordered = last_write_is_ordered || (0 != GetOrderedStages(ordering));
2345 is_raw_hazard = !most_recent_is_ordered;
John Zulauf361fb532020-07-22 10:45:39 -06002346 }
2347 }
John Zulauf4285ee92020-09-23 10:20:52 -06002348 if (is_raw_hazard) {
2349 hazard.Set(this, usage_index, READ_AFTER_WRITE, last_write, write_tag);
2350 }
John Zulauf361fb532020-07-22 10:45:39 -06002351 } else {
2352 // Only check for WAW if there are no reads since last_write
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002353 bool usage_write_is_ordered = (usage_bit & ordering.access_scope).any();
John Zulauf361fb532020-07-22 10:45:39 -06002354 if (last_read_count) {
John Zulauf361fb532020-07-22 10:45:39 -06002355 // Look for any WAR hazards outside the ordered set of stages
John Zulauf4285ee92020-09-23 10:20:52 -06002356 VkPipelineStageFlags ordered_stages = 0;
2357            // If the usage is ordered, we can ignore all ordered read stages w.r.t. WAR
2358 // If the usage is ordered, we can ignore all ordered read stages w.r.t. WAR)
2359 ordered_stages = GetOrderedStages(ordering);
2360 }
2361 // If we're tracking any reads that aren't ordered against the current write, got to check 'em all.
2362 if ((ordered_stages & last_read_stages) != last_read_stages) {
2363 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2364 const auto &read_access = last_reads[read_index];
2365 if (read_access.stage & ordered_stages) continue; // but we can skip the ordered ones
2366 if (IsReadHazard(usage_stage, read_access)) {
2367 hazard.Set(this, usage_index, WRITE_AFTER_READ, read_access.access, read_access.tag);
2368 break;
2369 }
John Zulaufd14743a2020-07-03 09:42:39 -06002370 }
2371 }
John Zulauf4285ee92020-09-23 10:20:52 -06002372 } else if (!(last_write_is_ordered && usage_write_is_ordered)) {
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002373 if (last_write.any() && IsWriteHazard(usage_bit)) {
John Zulauf4285ee92020-09-23 10:20:52 -06002374 hazard.Set(this, usage_index, WRITE_AFTER_WRITE, last_write, write_tag);
John Zulauf361fb532020-07-22 10:45:39 -06002375 }
John Zulauf69133422020-05-20 14:55:53 -06002376 }
2377 }
2378 return hazard;
2379}
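// Illustrative sketch of assumed application usage, not validator code: why two overlapping draws
// to the same color attachment do not trip the WAW branch above. Both accesses are
// SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, which is in kColorAttachmentRasterOrder's
// access scope, so last_write_is_ordered and usage_write_is_ordered are both true and rasterization
// order is trusted to serialize the writes. 'cmd' is a hypothetical command buffer.
#if 0
vkCmdDraw(cmd, 3, 1, 0, 0);  // writes the bound color attachment
vkCmdDraw(cmd, 3, 1, 0, 0);  // same attachment, same samples: ordered, so no hazard is reported
#endif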
2380
John Zulauf2f952d22020-02-10 11:34:51 -07002381// Asynchronous Hazards occur between subpasses with no connection through the DAG
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -07002382HazardResult ResourceAccessState::DetectAsyncHazard(SyncStageAccessIndex usage_index, const ResourceUsageTag &start_tag) const {
John Zulauf2f952d22020-02-10 11:34:51 -07002383 HazardResult hazard;
2384 auto usage = FlagBit(usage_index);
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -07002385 // Async checks need to not go back further than the start of the subpass, as we only want to find hazards between the async
2386 // subpasses. Anything older than that should have been checked at the start of each subpass, taking into account all of
2387 // the raster ordering rules.
John Zulauf2f952d22020-02-10 11:34:51 -07002388 if (IsRead(usage)) {
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -07002389 if (last_write.any() && (write_tag.index >= start_tag.index)) {
John Zulauf59e25072020-07-17 10:55:21 -06002390 hazard.Set(this, usage_index, READ_RACING_WRITE, last_write, write_tag);
John Zulauf2f952d22020-02-10 11:34:51 -07002391 }
2392 } else {
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -07002393 if (last_write.any() && (write_tag.index >= start_tag.index)) {
John Zulauf59e25072020-07-17 10:55:21 -06002394 hazard.Set(this, usage_index, WRITE_RACING_WRITE, last_write, write_tag);
John Zulauf2f952d22020-02-10 11:34:51 -07002395 } else if (last_read_count > 0) {
Jeremy Gebbenc4b78c52020-12-11 09:39:47 -07002396 // Any reads during the other subpass will conflict with this write, so we need to check them all.
2397 for (uint32_t i = 0; i < last_read_count; i++) {
2398 if (last_reads[i].tag.index >= start_tag.index) {
2399 hazard.Set(this, usage_index, WRITE_RACING_READ, last_reads[i].access, last_reads[i].tag);
2400 break;
2401 }
2402 }
John Zulauf2f952d22020-02-10 11:34:51 -07002403 }
2404 }
2405 return hazard;
2406}
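// Illustrative sketch, not validator code: how the start_tag guard above is used. Only accesses
// recorded *inside* the other, unconnected subpass (tag >= its start tag) count as racing; anything
// both subpasses inherited from before the render pass has already been validated once. The names
// 'access' and 'other_subpass_start_tag' are hypothetical.
#if 0
HazardResult race =
    access.DetectAsyncHazard(SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, other_subpass_start_tag);
if (race.hazard) {
    // One of WRITE_RACING_WRITE / WRITE_RACING_READ (or READ_RACING_WRITE for read usages)
}
#endif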
2407
John Zulauf36bcf6a2020-02-03 15:12:52 -07002408HazardResult ResourceAccessState::DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002409 const SyncStageAccessFlags &src_access_scope) const {
John Zulauf0cb5be22020-01-23 12:18:22 -07002410 // Only supporting image layout transitions for now
2411 assert(usage_index == SyncStageAccessIndex::SYNC_IMAGE_LAYOUT_TRANSITION);
2412 HazardResult hazard;
John Zulauf361fb532020-07-22 10:45:39 -06002413    // Only test for WAW if there are no intervening read operations.
2414    // See DetectHazard(SyncStageAccessIndex) above for more details.
2415 if (last_read_count) {
John Zulauf355e49b2020-04-24 15:11:15 -06002416 // Look at the reads if any
John Zulauf0cb5be22020-01-23 12:18:22 -07002417 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
John Zulauf36bcf6a2020-02-03 15:12:52 -07002418 const auto &read_access = last_reads[read_index];
2419            // If the read stage is not in the src sync scope
2420 // *AND* not execution chained with an existing sync barrier (that's the or)
2421 // then the barrier access is unsafe (R/W after R)
2422 if ((src_exec_scope & (read_access.stage | read_access.barriers)) == 0) {
John Zulauf59e25072020-07-17 10:55:21 -06002423 hazard.Set(this, usage_index, WRITE_AFTER_READ, read_access.access, read_access.tag);
John Zulauf0cb5be22020-01-23 12:18:22 -07002424 break;
2425 }
2426 }
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002427 } else if (last_write.any()) {
John Zulauf361fb532020-07-22 10:45:39 -06002428 // If the previous write is *not* in the 1st access scope
2429 // *AND* the current barrier is not in the dependency chain
2430 // *AND* the there is no prior memory barrier for the previous write in the dependency chain
2431 // then the barrier access is unsafe (R/W after W)
2432 if (((last_write & src_access_scope) == 0) && ((src_exec_scope & write_dependency_chain) == 0) && (write_barriers == 0)) {
2433 // TODO: Do we need a difference hazard name for this?
2434 hazard.Set(this, usage_index, WRITE_AFTER_WRITE, last_write, write_tag);
2435 }
John Zulaufd14743a2020-07-03 09:42:39 -06002436 }
John Zulauf361fb532020-07-22 10:45:39 -06002437
John Zulauf0cb5be22020-01-23 12:18:22 -07002438 return hazard;
2439}
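// Illustrative sketch of assumed application usage, not validator code: a layout transition whose
// source scope misses a prior transfer write, which is what the branch above reports. The prior
// write is neither in src_access_scope nor execution-chained, so the transition (treated as a
// write) is a WRITE_AFTER_WRITE. Image handle and subresource fields are omitted for brevity.
#if 0
VkImageMemoryBarrier barrier = {};
barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;  // does not cover the earlier TRANSFER_WRITE
barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
vkCmdPipelineBarrier(cmd, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,  // src stage excludes TRANSFER as well
                     VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &barrier);
#endif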
2440
John Zulauf5f13a792020-03-10 07:31:21 -06002441 // The logic behind resolves is the same as update; we assume that earlier hazards have been reported, and that no
2442 // transitive hazard can exist with a hazard between the earlier operations. Yes, an early hazard can mask that another
2443// exists, but if you fix *that* hazard it either fixes or unmasks the subsequent ones.
2444void ResourceAccessState::Resolve(const ResourceAccessState &other) {
2445 if (write_tag.IsBefore(other.write_tag)) {
John Zulauf4285ee92020-09-23 10:20:52 -06002446        // If this is a later write, we've reported any existing hazard, and we can just overwrite as the more recent
2447 // operation
John Zulauf5f13a792020-03-10 07:31:21 -06002448 *this = other;
2449 } else if (!other.write_tag.IsBefore(write_tag)) {
2450 // This is the *equals* case for write operations, we merged the write barriers and the read state (but without the
2451 // dependency chaining logic or any stage expansion)
2452 write_barriers |= other.write_barriers;
John Zulaufb02c1eb2020-10-06 16:33:36 -06002453 pending_write_barriers |= other.pending_write_barriers;
2454 pending_layout_transition |= other.pending_layout_transition;
2455 pending_write_dep_chain |= other.pending_write_dep_chain;
John Zulauf5f13a792020-03-10 07:31:21 -06002456
John Zulaufd14743a2020-07-03 09:42:39 -06002457 // Merge the read states
John Zulauf4285ee92020-09-23 10:20:52 -06002458 const auto pre_merge_count = last_read_count;
2459 const auto pre_merge_stages = last_read_stages;
John Zulauf5f13a792020-03-10 07:31:21 -06002460 for (uint32_t other_read_index = 0; other_read_index < other.last_read_count; other_read_index++) {
2461 auto &other_read = other.last_reads[other_read_index];
John Zulauf4285ee92020-09-23 10:20:52 -06002462 if (pre_merge_stages & other_read.stage) {
John Zulauf5f13a792020-03-10 07:31:21 -06002463 // Merge in the barriers for read stages that exist in *both* this and other
John Zulauf4285ee92020-09-23 10:20:52 -06002464 // TODO: This is N^2 with stages... perhaps the ReadStates should be sorted by stage index.
2465 // but we should wait on profiling data for that.
2466 for (uint32_t my_read_index = 0; my_read_index < pre_merge_count; my_read_index++) {
John Zulauf5f13a792020-03-10 07:31:21 -06002467 auto &my_read = last_reads[my_read_index];
2468 if (other_read.stage == my_read.stage) {
2469 if (my_read.tag.IsBefore(other_read.tag)) {
John Zulauf4285ee92020-09-23 10:20:52 -06002470 // Other is more recent, copy in the state
John Zulauf37ceaed2020-07-03 16:18:15 -06002471 my_read.access = other_read.access;
John Zulauf4285ee92020-09-23 10:20:52 -06002472 my_read.tag = other_read.tag;
John Zulaufb02c1eb2020-10-06 16:33:36 -06002473 my_read.pending_dep_chain = other_read.pending_dep_chain;
2474 // TODO: Phase 2 -- review the state merge logic to avoid false positive from overwriting the barriers
2475 // May require tracking more than one access per stage.
2476 my_read.barriers = other_read.barriers;
John Zulauf4285ee92020-09-23 10:20:52 -06002477 if (my_read.stage == VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT) {
2478                        // Since I'm overwriting the fragment stage read, also update the input attachment info
2479 // as this is the only stage that affects it.
John Zulauff51fbb62020-10-02 14:43:24 -06002480 input_attachment_read = other.input_attachment_read;
John Zulauf4285ee92020-09-23 10:20:52 -06002481 }
John Zulaufb02c1eb2020-10-06 16:33:36 -06002482 } else if (other_read.tag.IsBefore(my_read.tag)) {
2483 // The read tags match so merge the barriers
2484 my_read.barriers |= other_read.barriers;
2485 my_read.pending_dep_chain |= other_read.pending_dep_chain;
John Zulauf5f13a792020-03-10 07:31:21 -06002486 }
John Zulaufb02c1eb2020-10-06 16:33:36 -06002487
John Zulauf5f13a792020-03-10 07:31:21 -06002488 break;
2489 }
2490 }
2491 } else {
2492 // The other read stage doesn't exist in this, so add it.
2493 last_reads[last_read_count] = other_read;
2494 last_read_count++;
2495 last_read_stages |= other_read.stage;
John Zulauf4285ee92020-09-23 10:20:52 -06002496 if (other_read.stage == VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT) {
John Zulauff51fbb62020-10-02 14:43:24 -06002497 input_attachment_read = other.input_attachment_read;
John Zulauf4285ee92020-09-23 10:20:52 -06002498 }
John Zulauf5f13a792020-03-10 07:31:21 -06002499 }
2500 }
John Zulauf361fb532020-07-22 10:45:39 -06002501 read_execution_barriers |= other.read_execution_barriers;
John Zulauf4285ee92020-09-23 10:20:52 -06002502    }  // the else clause would be that the other write is before this write... in which case we supersede the other state and
2503 // ignore it.
John Zulauf5f13a792020-03-10 07:31:21 -06002504}
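// Illustrative sketch, not validator code: Resolve() merges two recordings of the same address
// range, e.g. when lazily importing accesses from a previous context. A strictly later write simply
// replaces the state; equal write tags merge write barriers and per-stage read state as implemented
// above. The names below are hypothetical.
#if 0
ResourceAccessState merged = state_from_previous_context;
merged.Resolve(state_from_current_context);  // keeps the most recent write and the union of reads
#endif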
2505
John Zulauf9cb530d2019-09-30 14:14:10 -06002506void ResourceAccessState::Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag) {
2507 // Move this logic in the ResourceStateTracker as methods, thereof (or we'll repeat it for every flavor of resource...
2508 const auto usage_bit = FlagBit(usage_index);
John Zulauf4285ee92020-09-23 10:20:52 -06002509 if (IsRead(usage_index)) {
John Zulauf9cb530d2019-09-30 14:14:10 -06002510        // Multiple outstanding reads may be of interest and do dependency chains independently
2511 // However, for purposes of barrier tracking, only one read per pipeline stage matters
2512 const auto usage_stage = PipelineStageBit(usage_index);
John Zulauf4285ee92020-09-23 10:20:52 -06002513 uint32_t update_index = kStageCount;
John Zulauf9cb530d2019-09-30 14:14:10 -06002514 if (usage_stage & last_read_stages) {
2515 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
John Zulauf4285ee92020-09-23 10:20:52 -06002516 if (last_reads[read_index].stage == usage_stage) {
2517 update_index = read_index;
John Zulauf9cb530d2019-09-30 14:14:10 -06002518 break;
2519 }
2520 }
John Zulauf4285ee92020-09-23 10:20:52 -06002521 assert(update_index < last_read_count);
John Zulauf9cb530d2019-09-30 14:14:10 -06002522 } else {
John Zulauf9cb530d2019-09-30 14:14:10 -06002523 assert(last_read_count < last_reads.size());
John Zulauf4285ee92020-09-23 10:20:52 -06002524 update_index = last_read_count++;
John Zulauf9cb530d2019-09-30 14:14:10 -06002525 last_read_stages |= usage_stage;
2526 }
John Zulauf4285ee92020-09-23 10:20:52 -06002527 last_reads[update_index].Set(usage_stage, usage_bit, 0, tag);
2528
2529        // Fragment shader reads come in two flavors, and we need to track whether this one is the special (input attachment) read.
2530 if (usage_stage == VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT) {
John Zulauff51fbb62020-10-02 14:43:24 -06002531 // TODO Revisit re: multiple reads for a given stage
2532 input_attachment_read = (usage_bit == SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ_BIT);
John Zulauf4285ee92020-09-23 10:20:52 -06002533 }
John Zulauf9cb530d2019-09-30 14:14:10 -06002534 } else {
2535 // Assume write
2536 // TODO determine what to do with READ-WRITE operations if any
John Zulauf89311b42020-09-29 16:28:47 -06002537 SetWrite(usage_bit, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002538 }
2539}
John Zulauf5f13a792020-03-10 07:31:21 -06002540
John Zulauf89311b42020-09-29 16:28:47 -06002541// Clobber last read and all barriers... because all we have is DANGER, DANGER, WILL ROBINSON!!!
2542// if the last_reads/last_write were unsafe, we've reported them, in either case the prior access is irrelevant.
2543// We can overwrite them as *this* write is now after them.
2544//
2545// Note: intentionally ignore pending barriers and chains (i.e. don't apply or clear them), let ApplyPendingBarriers handle them.
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002546void ResourceAccessState::SetWrite(const SyncStageAccessFlags &usage_bit, const ResourceUsageTag &tag) {
John Zulauf89311b42020-09-29 16:28:47 -06002547 last_read_count = 0;
2548 last_read_stages = 0;
2549 read_execution_barriers = 0;
John Zulauff51fbb62020-10-02 14:43:24 -06002550 input_attachment_read = false; // Denotes no outstanding input attachment read after the last write.
John Zulauf89311b42020-09-29 16:28:47 -06002551
2552 write_barriers = 0;
2553 write_dependency_chain = 0;
2554 write_tag = tag;
2555 last_write = usage_bit;
John Zulauf9cb530d2019-09-30 14:14:10 -06002556}
2557
John Zulauf89311b42020-09-29 16:28:47 -06002558// Apply the memory barrier without updating the existing barriers. The execution barrier
2559// changes the "chaining" state, but to keep barriers independent, we defer this until all barriers
2560// of the batch have been processed. Also, depending on whether layout transition happens, we'll either
2561// replace the current write barriers or add to them, so accumulate to pending as well.
2562void ResourceAccessState::ApplyBarrier(const SyncBarrier &barrier, bool layout_transition) {
2563 // For independent barriers we need to track what the new barriers and dependency chain *will* be when we're done
2564 // applying the memory barriers
John Zulauf86356ca2020-10-19 11:46:41 -06002565 // NOTE: We update the write barrier if the write is in the first access scope or if there is a layout
2566    // transition, under the theory of "most recent access". If the read/write *isn't* safe
2567    // vs. this layout transition, DetectBarrierHazard should report it. We treat the layout
2568    // transition *as* a write and in scope with the barrier (it's before visibility).
2569 if (layout_transition || InSourceScopeOrChain(barrier.src_exec_scope, barrier.src_access_scope)) {
John Zulauf89311b42020-09-29 16:28:47 -06002570 pending_write_barriers |= barrier.dst_access_scope;
2571 pending_write_dep_chain |= barrier.dst_exec_scope;
John Zulaufa0a98292020-09-18 09:30:10 -06002572 }
John Zulauf89311b42020-09-29 16:28:47 -06002573    // Track the layout transition as pending, since we can't modify last_write until all barriers are processed
2574 pending_layout_transition |= layout_transition;
John Zulaufa0a98292020-09-18 09:30:10 -06002575
John Zulauf89311b42020-09-29 16:28:47 -06002576 if (!pending_layout_transition) {
2577 // Once we're dealing with a layout transition (which is modelled as a *write*) then the last reads/writes/chains
2578 // don't need to be tracked as we're just going to zero them.
John Zulaufa0a98292020-09-18 09:30:10 -06002579 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
John Zulauf89311b42020-09-29 16:28:47 -06002580 ReadState &access = last_reads[read_index];
2581 // The | implements the "dependency chain" logic for this access, as the barriers field stores the second sync scope
2582 if (barrier.src_exec_scope & (access.stage | access.barriers)) {
2583 access.pending_dep_chain |= barrier.dst_exec_scope;
John Zulaufa0a98292020-09-18 09:30:10 -06002584 }
2585 }
John Zulaufa0a98292020-09-18 09:30:10 -06002586 }
John Zulaufa0a98292020-09-18 09:30:10 -06002587}
2588
John Zulauf89311b42020-09-29 16:28:47 -06002589void ResourceAccessState::ApplyPendingBarriers(const ResourceUsageTag &tag) {
2590 if (pending_layout_transition) {
John Zulauf89311b42020-09-29 16:28:47 -06002591 // SetWrite clobbers the read count, and thus we don't have to clear the read_state out.
2592 SetWrite(SYNC_IMAGE_LAYOUT_TRANSITION_BIT, tag); // Side effect notes below
2593 pending_layout_transition = false;
John Zulauf9cb530d2019-09-30 14:14:10 -06002594 }
John Zulauf89311b42020-09-29 16:28:47 -06002595
2596    // Apply the accumulated execution barriers (and thus update chaining information)
2597 // for layout transition, read count is zeroed by SetWrite, so this will be skipped.
2598 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2599 ReadState &access = last_reads[read_index];
2600 access.barriers |= access.pending_dep_chain;
2601 read_execution_barriers |= access.barriers;
2602 access.pending_dep_chain = 0;
2603 }
2604
2605 // We OR in the accumulated write chain and barriers even in the case of a layout transition as SetWrite zeros them.
2606 write_dependency_chain |= pending_write_dep_chain;
2607 write_barriers |= pending_write_barriers;
2608 pending_write_dep_chain = 0;
2609 pending_write_barriers = 0;
John Zulauf9cb530d2019-09-30 14:14:10 -06002610}
2611
John Zulauf59e25072020-07-17 10:55:21 -06002612// This should be just Bits or Index, but we don't have an invalid state for Index
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002613VkPipelineStageFlags ResourceAccessState::GetReadBarriers(const SyncStageAccessFlags &usage_bit) const {
John Zulauf59e25072020-07-17 10:55:21 -06002614 VkPipelineStageFlags barriers = 0U;
John Zulauf4285ee92020-09-23 10:20:52 -06002615
2616 for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
2617 const auto &read_access = last_reads[read_index];
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002618 if ((read_access.access & usage_bit).any()) {
John Zulauf4285ee92020-09-23 10:20:52 -06002619 barriers = read_access.barriers;
2620 break;
John Zulauf59e25072020-07-17 10:55:21 -06002621 }
2622 }
John Zulauf4285ee92020-09-23 10:20:52 -06002623
John Zulauf59e25072020-07-17 10:55:21 -06002624 return barriers;
2625}
2626
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002627inline bool ResourceAccessState::IsRAWHazard(VkPipelineStageFlagBits usage_stage, const SyncStageAccessFlags &usage) const {
John Zulauf4285ee92020-09-23 10:20:52 -06002628 assert(IsRead(usage));
2629 // Only RAW vs. last_write if it doesn't happen-after any other read because either:
2630 // * the previous reads are not hazards, and thus last_write must be visible and available to
2631 // any reads that happen after.
2632 // * the previous reads *are* hazards to last_write, have been reported, and if that hazard is fixed
2633    //    the current read will also not be a hazard, thus reporting a hazard here adds no needed information.
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002634 return last_write.any() && (0 == (read_execution_barriers & usage_stage)) && IsWriteHazard(usage);
John Zulauf4285ee92020-09-23 10:20:52 -06002635}
2636
John Zulauf4285ee92020-09-23 10:20:52 -06002637VkPipelineStageFlags ResourceAccessState::GetOrderedStages(const SyncOrderingBarrier &ordering) const {
2638 // Whether the stage are in the ordering scope only matters if the current write is ordered
2639 VkPipelineStageFlags ordered_stages = last_read_stages & ordering.exec_scope;
2640    // Special input attachment handling as always (not encoded in exec_scope)
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002641 const bool input_attachment_ordering = (ordering.access_scope & SYNC_FRAGMENT_SHADER_INPUT_ATTACHMENT_READ_BIT).any();
John Zulauff51fbb62020-10-02 14:43:24 -06002642 if (input_attachment_ordering && input_attachment_read) {
John Zulauf4285ee92020-09-23 10:20:52 -06002643        // If we have an input attachment in last_reads and input attachments are ordered, we add that stage
2644 ordered_stages |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
2645 }
2646
2647 return ordered_stages;
2648}
2649
2650inline ResourceAccessState::ReadState *ResourceAccessState::GetReadStateForStage(VkPipelineStageFlagBits stage,
2651 uint32_t search_limit) {
2652 ReadState *read_state = nullptr;
2653 search_limit = std::min(search_limit, last_read_count);
2654 for (uint32_t i = 0; i < search_limit; i++) {
2655 if (last_reads[i].stage == stage) {
2656 read_state = &last_reads[i];
2657 break;
2658 }
2659 }
2660 return read_state;
2661}
2662
John Zulaufd1f85d42020-04-15 12:23:15 -06002663void SyncValidator::ResetCommandBufferCallback(VkCommandBuffer command_buffer) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002664 auto *access_context = GetAccessContextNoInsert(command_buffer);
2665 if (access_context) {
2666 access_context->Reset();
John Zulauf9cb530d2019-09-30 14:14:10 -06002667 }
2668}
2669
John Zulaufd1f85d42020-04-15 12:23:15 -06002670void SyncValidator::FreeCommandBufferCallback(VkCommandBuffer command_buffer) {
2671 auto access_found = cb_access_state.find(command_buffer);
2672 if (access_found != cb_access_state.end()) {
2673 access_found->second->Reset();
2674 cb_access_state.erase(access_found);
2675 }
2676}
2677
John Zulauf89311b42020-09-29 16:28:47 -06002678void SyncValidator::ApplyGlobalBarriers(AccessContext *context, VkPipelineStageFlags src_exec_scope,
2679 VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags src_access_scope,
2680 SyncStageAccessFlags dst_access_scope, uint32_t memory_barrier_count,
2681 const VkMemoryBarrier *pMemoryBarriers, const ResourceUsageTag &tag) {
2682 ApplyBarrierOpsFunctor barriers_functor(true /* resolve */, std::min<uint32_t>(1, memory_barrier_count), tag);
2683 for (uint32_t barrier_index = 0; barrier_index < memory_barrier_count; barrier_index++) {
2684 const auto &barrier = pMemoryBarriers[barrier_index];
2685 SyncBarrier sync_barrier(src_exec_scope, SyncStageAccess::AccessScope(src_access_scope, barrier.srcAccessMask),
2686 dst_exec_scope, SyncStageAccess::AccessScope(dst_access_scope, barrier.dstAccessMask));
2687 barriers_functor.PushBack(sync_barrier, false);
2688 }
2689 if (0 == memory_barrier_count) {
2690 // If there are no global memory barriers, force an exec barrier
2691 barriers_functor.PushBack(SyncBarrier(src_exec_scope, 0, dst_exec_scope, 0), false);
2692 }
John Zulauf540266b2020-04-06 18:54:53 -06002693 context->ApplyGlobalBarriers(barriers_functor);
John Zulauf9cb530d2019-09-30 14:14:10 -06002694}
2695
John Zulauf540266b2020-04-06 18:54:53 -06002696void SyncValidator::ApplyBufferBarriers(AccessContext *context, VkPipelineStageFlags src_exec_scope,
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002697 const SyncStageAccessFlags &src_stage_accesses, VkPipelineStageFlags dst_exec_scope,
2698 const SyncStageAccessFlags &dst_stage_accesses, uint32_t barrier_count,
John Zulauf9cb530d2019-09-30 14:14:10 -06002699 const VkBufferMemoryBarrier *barriers) {
John Zulauf9cb530d2019-09-30 14:14:10 -06002700 for (uint32_t index = 0; index < barrier_count; index++) {
John Zulauf3e86bf02020-09-12 10:47:57 -06002701 auto barrier = barriers[index]; // barrier is a copy
John Zulauf9cb530d2019-09-30 14:14:10 -06002702 const auto *buffer = Get<BUFFER_STATE>(barrier.buffer);
2703 if (!buffer) continue;
John Zulauf3e86bf02020-09-12 10:47:57 -06002704 barrier.size = GetBufferWholeSize(*buffer, barrier.offset, barrier.size);
2705 const ResourceAccessRange range = MakeRange(barrier);
John Zulauf540266b2020-04-06 18:54:53 -06002706 const auto src_access_scope = AccessScope(src_stage_accesses, barrier.srcAccessMask);
2707 const auto dst_access_scope = AccessScope(dst_stage_accesses, barrier.dstAccessMask);
John Zulauf89311b42020-09-29 16:28:47 -06002708 const SyncBarrier sync_barrier(src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope);
2709 const ApplyBarrierFunctor update_action(sync_barrier, false /* layout_transition */);
2710 context->UpdateResourceAccess(*buffer, range, update_action);
John Zulauf9cb530d2019-09-30 14:14:10 -06002711 }
2712}
2713
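// Apply the VkImageMemoryBarrier list one barrier at a time. The subresource range is normalized against the image
// create info, and a layout transition is applied whenever oldLayout != newLayout; the tag identifies the barrier
// command in later hazard reports.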
John Zulauf540266b2020-04-06 18:54:53 -06002714void SyncValidator::ApplyImageBarriers(AccessContext *context, VkPipelineStageFlags src_exec_scope,
Jeremy Gebbend0de1f82020-11-09 08:21:07 -07002715 const SyncStageAccessFlags &src_stage_accesses, VkPipelineStageFlags dst_exec_scope,
2716 const SyncStageAccessFlags &dst_stage_accesses, uint32_t barrier_count,
John Zulauf355e49b2020-04-24 15:11:15 -06002717 const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag) {
John Zulauf5c5e88d2019-12-26 11:22:02 -07002718 for (uint32_t index = 0; index < barrier_count; index++) {
2719 const auto &barrier = barriers[index];
2720 const auto *image = Get<IMAGE_STATE>(barrier.image);
2721 if (!image) continue;
John Zulauf540266b2020-04-06 18:54:53 -06002722 auto subresource_range = NormalizeSubresourceRange(image->createInfo, barrier.subresourceRange);
John Zulauf355e49b2020-04-24 15:11:15 -06002723 bool layout_transition = barrier.oldLayout != barrier.newLayout;
2724 const auto src_access_scope = AccessScope(src_stage_accesses, barrier.srcAccessMask);
2725 const auto dst_access_scope = AccessScope(dst_stage_accesses, barrier.dstAccessMask);
John Zulauf89311b42020-09-29 16:28:47 -06002726 const SyncBarrier sync_barrier(src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope);
2727 const ApplyBarrierFunctor barrier_action(sync_barrier, layout_transition);
2728 context->UpdateResourceAccess(*image, subresource_range, barrier_action);
John Zulauf9cb530d2019-09-30 14:14:10 -06002729 }
John Zulauf9cb530d2019-09-30 14:14:10 -06002730}
2731
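// Validation phase for vkCmdCopyBuffer: hazards are detected against the current access context without modifying it.
// Each region's source range is checked as a transfer read and its destination range as a transfer write, stopping at
// the first reported hazard. For example (illustrative only), two copies writing overlapping ranges of the same
// destination buffer with no intervening barrier are reported here as WRITE_AFTER_WRITE.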
2732bool SyncValidator::PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2733 uint32_t regionCount, const VkBufferCopy *pRegions) const {
2734 bool skip = false;
John Zulauf3d84f1b2020-03-09 13:33:25 -06002735 const auto *cb_context = GetAccessContext(commandBuffer);
2736 assert(cb_context);
2737 if (!cb_context) return skip;
2738 const auto *context = cb_context->GetCurrentAccessContext();
John Zulauf9cb530d2019-09-30 14:14:10 -06002739
John Zulauf3d84f1b2020-03-09 13:33:25 -06002740 // If we have no previous accesses, we have no hazards
John Zulauf3d84f1b2020-03-09 13:33:25 -06002741 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002742 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002743
2744 for (uint32_t region = 0; region < regionCount; region++) {
2745 const auto &copy_region = pRegions[region];
John Zulauf16adfc92020-04-08 10:28:33 -06002746 if (src_buffer) {
John Zulauf3e86bf02020-09-12 10:47:57 -06002747 const ResourceAccessRange src_range = MakeRange(*src_buffer, copy_region.srcOffset, copy_region.size);
John Zulauf16adfc92020-04-08 10:28:33 -06002748 auto hazard = context->DetectHazard(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002749 if (hazard.hazard) {
2750 // TODO -- add tag information to log msg when useful.
locke-lunarga0003652020-03-10 11:38:51 -06002751 skip |= LogError(srcBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06002752 "vkCmdCopyBuffer: Hazard %s for srcBuffer %s, region %" PRIu32 ". Access info %s.",
John Zulauf1dae9192020-06-16 15:46:44 -06002753 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcBuffer).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06002754 string_UsageTag(hazard).c_str());
John Zulauf9cb530d2019-09-30 14:14:10 -06002755 }
John Zulauf9cb530d2019-09-30 14:14:10 -06002756 }
John Zulauf16adfc92020-04-08 10:28:33 -06002757 if (dst_buffer && !skip) {
John Zulauf3e86bf02020-09-12 10:47:57 -06002758 const ResourceAccessRange dst_range = MakeRange(*dst_buffer, copy_region.dstOffset, copy_region.size);
John Zulauf355e49b2020-04-24 15:11:15 -06002759 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002760 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002761 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06002762 "vkCmdCopyBuffer: Hazard %s for dstBuffer %s, region %" PRIu32 ". Access info %s.",
John Zulauf1dae9192020-06-16 15:46:44 -06002763 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06002764 string_UsageTag(hazard).c_str());
John Zulauf3d84f1b2020-03-09 13:33:25 -06002765 }
2766 }
2767 if (skip) break;
John Zulauf9cb530d2019-09-30 14:14:10 -06002768 }
2769 return skip;
2770}
2771
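// Record phase for vkCmdCopyBuffer: access state is updated with this command's tag so that later commands can report
// this copy as the prior conflicting access.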
2772void SyncValidator::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2773 uint32_t regionCount, const VkBufferCopy *pRegions) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002774 auto *cb_context = GetAccessContext(commandBuffer);
2775 assert(cb_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002776 const auto tag = cb_context->NextCommandTag(CMD_COPYBUFFER);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002777 auto *context = cb_context->GetCurrentAccessContext();
2778
John Zulauf9cb530d2019-09-30 14:14:10 -06002779 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
John Zulauf9cb530d2019-09-30 14:14:10 -06002780 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
John Zulauf9cb530d2019-09-30 14:14:10 -06002781
2782 for (uint32_t region = 0; region < regionCount; region++) {
2783 const auto &copy_region = pRegions[region];
John Zulauf16adfc92020-04-08 10:28:33 -06002784 if (src_buffer) {
John Zulauf3e86bf02020-09-12 10:47:57 -06002785 const ResourceAccessRange src_range = MakeRange(*src_buffer, copy_region.srcOffset, copy_region.size);
John Zulauf16adfc92020-04-08 10:28:33 -06002786 context->UpdateAccessState(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002787 }
John Zulauf16adfc92020-04-08 10:28:33 -06002788 if (dst_buffer) {
John Zulauf3e86bf02020-09-12 10:47:57 -06002789 const ResourceAccessRange dst_range = MakeRange(*dst_buffer, copy_region.dstOffset, copy_region.size);
John Zulauf16adfc92020-04-08 10:28:33 -06002790 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range, tag);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002791 }
2792 }
2793}
2794
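// KHR copy2 variants: the same hazard checks and access-state updates as vkCmdCopyBuffer, driven by the
// VkCopyBufferInfo2KHR structure.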
Jeff Leger178b1e52020-10-05 12:22:23 -04002795bool SyncValidator::PreCallValidateCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
2796 const VkCopyBufferInfo2KHR *pCopyBufferInfos) const {
2797 bool skip = false;
2798 const auto *cb_context = GetAccessContext(commandBuffer);
2799 assert(cb_context);
2800 if (!cb_context) return skip;
2801 const auto *context = cb_context->GetCurrentAccessContext();
2802
2803 // If we have no previous accesses, we have no hazards
2804 const auto *src_buffer = Get<BUFFER_STATE>(pCopyBufferInfos->srcBuffer);
2805 const auto *dst_buffer = Get<BUFFER_STATE>(pCopyBufferInfos->dstBuffer);
2806
2807 for (uint32_t region = 0; region < pCopyBufferInfos->regionCount; region++) {
2808 const auto &copy_region = pCopyBufferInfos->pRegions[region];
2809 if (src_buffer) {
2810 const ResourceAccessRange src_range = MakeRange(*src_buffer, copy_region.srcOffset, copy_region.size);
2811 auto hazard = context->DetectHazard(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range);
2812 if (hazard.hazard) {
2813 // TODO -- add tag information to log msg when useful.
2814 skip |= LogError(pCopyBufferInfos->srcBuffer, string_SyncHazardVUID(hazard.hazard),
2815 "vkCmdCopyBuffer2KHR(): Hazard %s for srcBuffer %s, region %" PRIu32 ". Access info %s.",
2816 string_SyncHazard(hazard.hazard), report_data->FormatHandle(pCopyBufferInfos->srcBuffer).c_str(),
2817 region, string_UsageTag(hazard).c_str());
2818 }
2819 }
2820 if (dst_buffer && !skip) {
2821 const ResourceAccessRange dst_range = MakeRange(*dst_buffer, copy_region.dstOffset, copy_region.size);
2822 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range);
2823 if (hazard.hazard) {
2824 skip |= LogError(pCopyBufferInfos->dstBuffer, string_SyncHazardVUID(hazard.hazard),
2825 "vkCmdCopyBuffer2KHR(): Hazard %s for dstBuffer %s, region %" PRIu32 ". Access info %s.",
2826 string_SyncHazard(hazard.hazard), report_data->FormatHandle(pCopyBufferInfos->dstBuffer).c_str(),
2827 region, string_UsageTag(hazard).c_str());
2828 }
2829 }
2830 if (skip) break;
2831 }
2832 return skip;
2833}
2834
2835void SyncValidator::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
2836 auto *cb_context = GetAccessContext(commandBuffer);
2837 assert(cb_context);
2838 const auto tag = cb_context->NextCommandTag(CMD_COPYBUFFER2KHR);
2839 auto *context = cb_context->GetCurrentAccessContext();
2840
2841 const auto *src_buffer = Get<BUFFER_STATE>(pCopyBufferInfos->srcBuffer);
2842 const auto *dst_buffer = Get<BUFFER_STATE>(pCopyBufferInfos->dstBuffer);
2843
2844 for (uint32_t region = 0; region < pCopyBufferInfos->regionCount; region++) {
2845 const auto &copy_region = pCopyBufferInfos->pRegions[region];
2846 if (src_buffer) {
2847 const ResourceAccessRange src_range = MakeRange(*src_buffer, copy_region.srcOffset, copy_region.size);
2848 context->UpdateAccessState(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range, tag);
2849 }
2850 if (dst_buffer) {
2851 const ResourceAccessRange dst_range = MakeRange(*dst_buffer, copy_region.dstOffset, copy_region.size);
2852 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range, tag);
2853 }
2854 }
2855}
2856
John Zulauf5c5e88d2019-12-26 11:22:02 -07002857bool SyncValidator::PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2858 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2859 const VkImageCopy *pRegions) const {
2860 bool skip = false;
John Zulauf3d84f1b2020-03-09 13:33:25 -06002861 const auto *cb_access_context = GetAccessContext(commandBuffer);
2862 assert(cb_access_context);
2863 if (!cb_access_context) return skip;
John Zulauf5c5e88d2019-12-26 11:22:02 -07002864
John Zulauf3d84f1b2020-03-09 13:33:25 -06002865 const auto *context = cb_access_context->GetCurrentAccessContext();
2866 assert(context);
2867 if (!context) return skip;
2868
2869 const auto *src_image = Get<IMAGE_STATE>(srcImage);
2870 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002871 for (uint32_t region = 0; region < regionCount; region++) {
2872 const auto &copy_region = pRegions[region];
2873 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002874 auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.srcSubresource,
John Zulauf3d84f1b2020-03-09 13:33:25 -06002875 copy_region.srcOffset, copy_region.extent);
2876 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002877 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06002878 "vkCmdCopyImage: Hazard %s for srcImage %s, region %" PRIu32 ". Access info %s.",
John Zulauf1dae9192020-06-16 15:46:44 -06002879 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06002880 string_UsageTag(hazard).c_str());
John Zulauf5c5e88d2019-12-26 11:22:02 -07002881 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06002882 }
2883
2884 if (dst_image) {
locke-lunarg1df1f882020-03-02 16:42:08 -07002885 VkExtent3D dst_copy_extent =
2886 GetAdjustedDestImageExtent(src_image->createInfo.format, dst_image->createInfo.format, copy_region.extent);
John Zulauf540266b2020-04-06 18:54:53 -06002887 auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.dstSubresource,
locke-lunarg1df1f882020-03-02 16:42:08 -07002888 copy_region.dstOffset, dst_copy_extent);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002889 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06002890 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06002891 "vkCmdCopyImage: Hazard %s for dstImage %s, region %" PRIu32 ". Access info %s.",
John Zulauf1dae9192020-06-16 15:46:44 -06002892 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06002893 string_UsageTag(hazard).c_str());
John Zulauf5c5e88d2019-12-26 11:22:02 -07002894 }
locke-lunarg1dbbb9e2020-02-28 22:43:53 -07002895 if (skip) break;
John Zulauf5c5e88d2019-12-26 11:22:02 -07002896 }
2897 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06002898
John Zulauf5c5e88d2019-12-26 11:22:02 -07002899 return skip;
2900}
2901
2902void SyncValidator::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2903 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2904 const VkImageCopy *pRegions) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06002905 auto *cb_access_context = GetAccessContext(commandBuffer);
2906 assert(cb_access_context);
John Zulauf2b151bf2020-04-24 15:37:44 -06002907 const auto tag = cb_access_context->NextCommandTag(CMD_COPYIMAGE);
John Zulauf3d84f1b2020-03-09 13:33:25 -06002908 auto *context = cb_access_context->GetCurrentAccessContext();
2909 assert(context);
2910
John Zulauf5c5e88d2019-12-26 11:22:02 -07002911 auto *src_image = Get<IMAGE_STATE>(srcImage);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002912 auto *dst_image = Get<IMAGE_STATE>(dstImage);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002913
2914 for (uint32_t region = 0; region < regionCount; region++) {
2915 const auto &copy_region = pRegions[region];
John Zulauf3d84f1b2020-03-09 13:33:25 -06002916 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06002917 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.srcSubresource, copy_region.srcOffset,
2918 copy_region.extent, tag);
John Zulauf5c5e88d2019-12-26 11:22:02 -07002919 }
John Zulauf3d84f1b2020-03-09 13:33:25 -06002920 if (dst_image) {
locke-lunarg1df1f882020-03-02 16:42:08 -07002921 VkExtent3D dst_copy_extent =
2922 GetAdjustedDestImageExtent(src_image->createInfo.format, dst_image->createInfo.format, copy_region.extent);
John Zulauf540266b2020-04-06 18:54:53 -06002923 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.dstSubresource, copy_region.dstOffset,
2924 dst_copy_extent, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06002925 }
2926 }
2927}
2928
Jeff Leger178b1e52020-10-05 12:22:23 -04002929bool SyncValidator::PreCallValidateCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
2930 const VkCopyImageInfo2KHR *pCopyImageInfo) const {
2931 bool skip = false;
2932 const auto *cb_access_context = GetAccessContext(commandBuffer);
2933 assert(cb_access_context);
2934 if (!cb_access_context) return skip;
2935
2936 const auto *context = cb_access_context->GetCurrentAccessContext();
2937 assert(context);
2938 if (!context) return skip;
2939
2940 const auto *src_image = Get<IMAGE_STATE>(pCopyImageInfo->srcImage);
2941 const auto *dst_image = Get<IMAGE_STATE>(pCopyImageInfo->dstImage);
2942 for (uint32_t region = 0; region < pCopyImageInfo->regionCount; region++) {
2943 const auto &copy_region = pCopyImageInfo->pRegions[region];
2944 if (src_image) {
2945 auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.srcSubresource,
2946 copy_region.srcOffset, copy_region.extent);
2947 if (hazard.hazard) {
2948 skip |= LogError(pCopyImageInfo->srcImage, string_SyncHazardVUID(hazard.hazard),
2949 "vkCmdCopyImage2KHR: Hazard %s for srcImage %s, region %" PRIu32 ". Access info %s.",
2950 string_SyncHazard(hazard.hazard), report_data->FormatHandle(pCopyImageInfo->srcImage).c_str(),
2951 region, string_UsageTag(hazard).c_str());
2952 }
2953 }
2954
2955 if (dst_image) {
2956 VkExtent3D dst_copy_extent =
2957 GetAdjustedDestImageExtent(src_image->createInfo.format, dst_image->createInfo.format, copy_region.extent);
2958 auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.dstSubresource,
2959 copy_region.dstOffset, dst_copy_extent);
2960 if (hazard.hazard) {
2961 skip |= LogError(pCopyImageInfo->dstImage, string_SyncHazardVUID(hazard.hazard),
2962 "vkCmdCopyImage2KHR: Hazard %s for dstImage %s, region %" PRIu32 ". Access info %s.",
2963 string_SyncHazard(hazard.hazard), report_data->FormatHandle(pCopyImageInfo->dstImage).c_str(),
2964 region, string_UsageTag(hazard).c_str());
2965 }
2966 if (skip) break;
2967 }
2968 }
2969
2970 return skip;
2971}
2972
2973void SyncValidator::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) {
2974 auto *cb_access_context = GetAccessContext(commandBuffer);
2975 assert(cb_access_context);
2976 const auto tag = cb_access_context->NextCommandTag(CMD_COPYIMAGE2KHR);
2977 auto *context = cb_access_context->GetCurrentAccessContext();
2978 assert(context);
2979
2980 auto *src_image = Get<IMAGE_STATE>(pCopyImageInfo->srcImage);
2981 auto *dst_image = Get<IMAGE_STATE>(pCopyImageInfo->dstImage);
2982
2983 for (uint32_t region = 0; region < pCopyImageInfo->regionCount; region++) {
2984 const auto &copy_region = pCopyImageInfo->pRegions[region];
2985 if (src_image) {
2986 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.srcSubresource, copy_region.srcOffset,
2987 copy_region.extent, tag);
2988 }
2989 if (dst_image) {
2990 VkExtent3D dst_copy_extent =
2991 GetAdjustedDestImageExtent(src_image->createInfo.format, dst_image->createInfo.format, copy_region.extent);
2992 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.dstSubresource, copy_region.dstOffset,
2993 dst_copy_extent, tag);
2994 }
2995 }
2996}
2997
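// Pipeline barrier validation currently only checks image layout transitions, detecting hazards between each
// transition and prior accesses within the barrier's source scope.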
John Zulauf9cb530d2019-09-30 14:14:10 -06002998bool SyncValidator::PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2999 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
3000 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3001 uint32_t bufferMemoryBarrierCount,
3002 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3003 uint32_t imageMemoryBarrierCount,
3004 const VkImageMemoryBarrier *pImageMemoryBarriers) const {
3005 bool skip = false;
John Zulauf3d84f1b2020-03-09 13:33:25 -06003006 const auto *cb_access_context = GetAccessContext(commandBuffer);
3007 assert(cb_access_context);
3008 if (!cb_access_context) return skip;
John Zulauf0cb5be22020-01-23 12:18:22 -07003009
John Zulauf3d84f1b2020-03-09 13:33:25 -06003010 const auto *context = cb_access_context->GetCurrentAccessContext();
3011 assert(context);
3012 if (!context) return skip;
John Zulauf0cb5be22020-01-23 12:18:22 -07003013
John Zulauf3d84f1b2020-03-09 13:33:25 -06003014 const auto src_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), srcStageMask);
John Zulauf36bcf6a2020-02-03 15:12:52 -07003015 const auto src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
3016 auto src_stage_accesses = AccessScopeByStage(src_stage_mask);
John Zulauf0cb5be22020-01-23 12:18:22 -07003017 // Validate Image Layout transitions
3018 for (uint32_t index = 0; index < imageMemoryBarrierCount; index++) {
3019 const auto &barrier = pImageMemoryBarriers[index];
3020 if (barrier.newLayout == barrier.oldLayout) continue; // Only interested in layout transitions at this point.
3021 const auto *image_state = Get<IMAGE_STATE>(barrier.image);
3022 if (!image_state) continue;
John Zulauf16adfc92020-04-08 10:28:33 -06003023 const auto hazard = context->DetectImageBarrierHazard(*image_state, src_exec_scope, src_stage_accesses, barrier);
John Zulauf0cb5be22020-01-23 12:18:22 -07003024 if (hazard.hazard) {
John Zulauf7635de32020-05-29 17:14:15 -06003025 // PHASE1 TODO -- add tag information to log msg when useful.
locke-lunarga0003652020-03-10 11:38:51 -06003026 skip |= LogError(barrier.image, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06003027 "vkCmdPipelineBarrier: Hazard %s for image barrier %" PRIu32 " %s. Access info %s.",
John Zulauf1dae9192020-06-16 15:46:44 -06003028 string_SyncHazard(hazard.hazard), index, report_data->FormatHandle(barrier.image).c_str(),
John Zulauf37ceaed2020-07-03 16:18:15 -06003029 string_UsageTag(hazard).c_str());
John Zulauf0cb5be22020-01-23 12:18:22 -07003030 }
3031 }
John Zulauf9cb530d2019-09-30 14:14:10 -06003032
3033 return skip;
3034}
3035
3036void SyncValidator::PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
3037 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
3038 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3039 uint32_t bufferMemoryBarrierCount,
3040 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3041 uint32_t imageMemoryBarrierCount,
3042 const VkImageMemoryBarrier *pImageMemoryBarriers) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06003043 auto *cb_access_context = GetAccessContext(commandBuffer);
3044 assert(cb_access_context);
3045 if (!cb_access_context) return;
John Zulauf2b151bf2020-04-24 15:37:44 -06003046 const auto tag = cb_access_context->NextCommandTag(CMD_PIPELINEBARRIER);
John Zulauf3d84f1b2020-03-09 13:33:25 -06003047 auto access_context = cb_access_context->GetCurrentAccessContext();
3048 assert(access_context);
3049 if (!access_context) return;
John Zulauf9cb530d2019-09-30 14:14:10 -06003050
John Zulauf3d84f1b2020-03-09 13:33:25 -06003051 const auto src_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), srcStageMask);
John Zulauf36bcf6a2020-02-03 15:12:52 -07003052 auto src_stage_accesses = AccessScopeByStage(src_stage_mask);
John Zulauf3d84f1b2020-03-09 13:33:25 -06003053 const auto dst_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), dstStageMask);
John Zulauf36bcf6a2020-02-03 15:12:52 -07003054 auto dst_stage_accesses = AccessScopeByStage(dst_stage_mask);
3055 const auto src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
3056 const auto dst_exec_scope = WithLaterPipelineStages(dst_stage_mask);
John Zulauf89311b42020-09-29 16:28:47 -06003057
3058 // These two apply barriers one at a time as the are restricted to the resource ranges specified per each barrier,
3059 // but do not update the dependency chain information (but set the "pending" state) // s.t. the order independence
3060 // of the barriers is maintained.
John Zulauf3d84f1b2020-03-09 13:33:25 -06003061 ApplyBufferBarriers(access_context, src_exec_scope, src_stage_accesses, dst_exec_scope, dst_stage_accesses,
3062 bufferMemoryBarrierCount, pBufferMemoryBarriers);
John Zulauf540266b2020-04-06 18:54:53 -06003063 ApplyImageBarriers(access_context, src_exec_scope, src_stage_accesses, dst_exec_scope, dst_stage_accesses,
John Zulauf355e49b2020-04-24 15:11:15 -06003064 imageMemoryBarrierCount, pImageMemoryBarriers, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06003065
John Zulauf89311b42020-09-29 16:28:47 -06003066 // Apply the global barriers last: as this pass walks all memory, it can also clean up the "pending" state without
3067 // requiring an additional pass, updating the dependency chains *last* as it goes along.
3068 // This is needed to guarantee order independence of the three lists.
John Zulauf3d84f1b2020-03-09 13:33:25 -06003069 ApplyGlobalBarriers(access_context, src_exec_scope, dst_exec_scope, src_stage_accesses, dst_stage_accesses, memoryBarrierCount,
John Zulauf89311b42020-09-29 16:28:47 -06003070 pMemoryBarriers, tag);
John Zulauf9cb530d2019-09-30 14:14:10 -06003071}
3072
3073void SyncValidator::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
3074 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
3075 // The state tracker sets up the device state
3076 StateTracker::PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);
3077
John Zulauf5f13a792020-03-10 07:31:21 -06003078 // Add callback hooks for the functions that are either broadly or deeply used, and for which the
John Zulauf9cb530d2019-09-30 14:14:10 -06003079 // ValidationStateTracker refactor would be messier without them.
John Zulauf9cb530d2019-09-30 14:14:10 -06003080 // TODO: Find a good way to do this hooklessly.
3081 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
3082 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, LayerObjectTypeSyncValidation);
3083 SyncValidator *sync_device_state = static_cast<SyncValidator *>(validation_data);
3084
John Zulaufd1f85d42020-04-15 12:23:15 -06003085 sync_device_state->SetCommandBufferResetCallback([sync_device_state](VkCommandBuffer command_buffer) -> void {
3086 sync_device_state->ResetCommandBufferCallback(command_buffer);
3087 });
3088 sync_device_state->SetCommandBufferFreeCallback([sync_device_state](VkCommandBuffer command_buffer) -> void {
3089 sync_device_state->FreeCommandBufferCallback(command_buffer);
3090 });
John Zulauf9cb530d2019-09-30 14:14:10 -06003091}
John Zulauf3d84f1b2020-03-09 13:33:25 -06003092
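// Render pass begin/next/end validation and record entry points. The heavy lifting is delegated to the command buffer
// access context, which manages the per-subpass access contexts.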
John Zulauf355e49b2020-04-24 15:11:15 -06003093bool SyncValidator::ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003094 const VkSubpassBeginInfo *pSubpassBeginInfo, const char *func_name) const {
John Zulauf355e49b2020-04-24 15:11:15 -06003095 bool skip = false;
3096 const auto rp_state = Get<RENDER_PASS_STATE>(pRenderPassBegin->renderPass);
3097 auto cb_context = GetAccessContext(commandBuffer);
3098
3099 if (rp_state && cb_context) {
3100 skip |= cb_context->ValidateBeginRenderPass(*rp_state, pRenderPassBegin, pSubpassBeginInfo, func_name);
3101 }
3102
3103 return skip;
3104}
3105
3106bool SyncValidator::PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
3107 VkSubpassContents contents) const {
3108 bool skip = StateTracker::PreCallValidateCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
3109 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
3110 subpass_begin_info.contents = contents;
3111 skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, &subpass_begin_info, "vkCmdBeginRenderPass");
3112 return skip;
3113}
3114
3115bool SyncValidator::PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003116 const VkSubpassBeginInfo *pSubpassBeginInfo) const {
John Zulauf355e49b2020-04-24 15:11:15 -06003117 bool skip = StateTracker::PreCallValidateCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
3118 skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, "vkCmdBeginRenderPass2");
3119 return skip;
3120}
3121
3122bool SyncValidator::PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
3123 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003124 const VkSubpassBeginInfo *pSubpassBeginInfo) const {
John Zulauf355e49b2020-04-24 15:11:15 -06003125 bool skip = StateTracker::PreCallValidateCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
3126 skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, "vkCmdBeginRenderPass2KHR");
3127 return skip;
3128}
3129
John Zulauf3d84f1b2020-03-09 13:33:25 -06003130void SyncValidator::PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
3131 VkResult result) {
3132 // The state tracker sets up the command buffer state
3133 StateTracker::PostCallRecordBeginCommandBuffer(commandBuffer, pBeginInfo, result);
3134
3135 // Create/initialize the structure that tracks accesses at the command buffer scope.
3136 auto cb_access_context = GetAccessContext(commandBuffer);
3137 assert(cb_access_context);
3138 cb_access_context->Reset();
3139}
3140
3141void SyncValidator::RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
John Zulauf355e49b2020-04-24 15:11:15 -06003142 const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE command) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06003143 auto cb_context = GetAccessContext(commandBuffer);
John Zulauf355e49b2020-04-24 15:11:15 -06003144 if (cb_context) {
3145 cb_context->RecordBeginRenderPass(cb_context->NextCommandTag(command));
John Zulauf3d84f1b2020-03-09 13:33:25 -06003146 }
3147}
3148
3149void SyncValidator::PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
3150 VkSubpassContents contents) {
3151 StateTracker::PostCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
3152 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
3153 subpass_begin_info.contents = contents;
John Zulauf355e49b2020-04-24 15:11:15 -06003154 RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, &subpass_begin_info, CMD_BEGINRENDERPASS);
John Zulauf3d84f1b2020-03-09 13:33:25 -06003155}
3156
3157void SyncValidator::PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
3158 const VkSubpassBeginInfo *pSubpassBeginInfo) {
3159 StateTracker::PostCallRecordCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06003160 RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, CMD_BEGINRENDERPASS2);
John Zulauf3d84f1b2020-03-09 13:33:25 -06003161}
3162
3163void SyncValidator::PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
3164 const VkRenderPassBeginInfo *pRenderPassBegin,
3165 const VkSubpassBeginInfo *pSubpassBeginInfo) {
3166 StateTracker::PostCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06003167 RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, CMD_BEGINRENDERPASS2);
3168}
3169
Mike Schuchardt2df08912020-12-15 16:28:09 -08003170bool SyncValidator::ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
3171 const VkSubpassEndInfo *pSubpassEndInfo, const char *func_name) const {
John Zulauf355e49b2020-04-24 15:11:15 -06003172 bool skip = false;
3173
3174 auto cb_context = GetAccessContext(commandBuffer);
3175 assert(cb_context);
3176 auto cb_state = cb_context->GetCommandBufferState();
3177 if (!cb_state) return skip;
3178
3179 auto rp_state = cb_state->activeRenderPass;
3180 if (!rp_state) return skip;
3181
3182 skip |= cb_context->ValidateNextSubpass(func_name);
3183
3184 return skip;
3185}
3186
3187bool SyncValidator::PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const {
3188 bool skip = StateTracker::PreCallValidateCmdNextSubpass(commandBuffer, contents);
3189 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
3190 subpass_begin_info.contents = contents;
3191 skip |= ValidateCmdNextSubpass(commandBuffer, &subpass_begin_info, nullptr, "vkCmdNextSubpass");
3192 return skip;
3193}
3194
Mike Schuchardt2df08912020-12-15 16:28:09 -08003195bool SyncValidator::PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
3196 const VkSubpassEndInfo *pSubpassEndInfo) const {
John Zulauf355e49b2020-04-24 15:11:15 -06003197 bool skip = StateTracker::PreCallValidateCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
3198 skip |= ValidateCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, "vkCmdNextSubpass2KHR");
3199 return skip;
3200}
3201
3202bool SyncValidator::PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
3203 const VkSubpassEndInfo *pSubpassEndInfo) const {
3204 bool skip = StateTracker::PreCallValidateCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
3205 skip |= ValidateCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, "vkCmdNextSubpass2");
3206 return skip;
John Zulauf3d84f1b2020-03-09 13:33:25 -06003207}
3208
3209void SyncValidator::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
John Zulauf355e49b2020-04-24 15:11:15 -06003210 const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command) {
John Zulauf3d84f1b2020-03-09 13:33:25 -06003211 auto cb_context = GetAccessContext(commandBuffer);
3212 assert(cb_context);
3213 auto cb_state = cb_context->GetCommandBufferState();
3214 if (!cb_state) return;
3215
3216 auto rp_state = cb_state->activeRenderPass;
3217 if (!rp_state) return;
3218
John Zulauf355e49b2020-04-24 15:11:15 -06003219 cb_context->RecordNextSubpass(*rp_state, cb_context->NextCommandTag(command));
John Zulauf3d84f1b2020-03-09 13:33:25 -06003220}
3221
3222void SyncValidator::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3223 StateTracker::PostCallRecordCmdNextSubpass(commandBuffer, contents);
3224 auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
3225 subpass_begin_info.contents = contents;
John Zulauf355e49b2020-04-24 15:11:15 -06003226 RecordCmdNextSubpass(commandBuffer, &subpass_begin_info, nullptr, CMD_NEXTSUBPASS);
John Zulauf3d84f1b2020-03-09 13:33:25 -06003227}
3228
3229void SyncValidator::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
3230 const VkSubpassEndInfo *pSubpassEndInfo) {
3231 StateTracker::PostCallRecordCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06003232 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, CMD_NEXTSUBPASS2);
John Zulauf3d84f1b2020-03-09 13:33:25 -06003233}
3234
3235void SyncValidator::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
3236 const VkSubpassEndInfo *pSubpassEndInfo) {
3237 StateTracker::PostCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
John Zulauf355e49b2020-04-24 15:11:15 -06003238 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, CMD_NEXTSUBPASS2);
John Zulauf3d84f1b2020-03-09 13:33:25 -06003239}
3240
Mike Schuchardt2df08912020-12-15 16:28:09 -08003241bool SyncValidator::ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo,
John Zulauf355e49b2020-04-24 15:11:15 -06003242 const char *func_name) const {
3243 bool skip = false;
3244
3245 auto cb_context = GetAccessContext(commandBuffer);
3246 assert(cb_context);
3247 auto cb_state = cb_context->GetCommandBufferState();
3248 if (!cb_state) return skip;
3249
3250 auto rp_state = cb_state->activeRenderPass;
3251 if (!rp_state) return skip;
3252
3253 skip |= cb_context->ValidateEndRenderpass(func_name);
3254 return skip;
3255}
3256
3257bool SyncValidator::PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const {
3258 bool skip = StateTracker::PreCallValidateCmdEndRenderPass(commandBuffer);
3259 skip |= ValidateCmdEndRenderPass(commandBuffer, nullptr, "vkCmdEndRenderPass");
3260 return skip;
3261}
3262
Mike Schuchardt2df08912020-12-15 16:28:09 -08003263bool SyncValidator::PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const {
John Zulauf355e49b2020-04-24 15:11:15 -06003264 bool skip = StateTracker::PreCallValidateCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
3265 skip |= ValidateCmdEndRenderPass(commandBuffer, pSubpassEndInfo, "vkCmdEndRenderPass2");
3266 return skip;
3267}
3268
3269bool SyncValidator::PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003270 const VkSubpassEndInfo *pSubpassEndInfo) const {
John Zulauf355e49b2020-04-24 15:11:15 -06003271 bool skip = StateTracker::PreCallValidateCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
3272 skip |= ValidateCmdEndRenderPass(commandBuffer, pSubpassEndInfo, "vkCmdEndRenderPass2KHR");
3273 return skip;
3274}
3275
3276void SyncValidator::RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo,
3277 CMD_TYPE command) {
John Zulaufe5da6e52020-03-18 15:32:18 -06003278 // Resolve all the subpass contexts to the command buffer context
3279 auto cb_context = GetAccessContext(commandBuffer);
3280 assert(cb_context);
3281 auto cb_state = cb_context->GetCommandBufferState();
3282 if (!cb_state) return;
3283
locke-lunargaecf2152020-05-12 17:15:41 -06003284 const auto *rp_state = cb_state->activeRenderPass.get();
John Zulaufe5da6e52020-03-18 15:32:18 -06003285 if (!rp_state) return;
3286
John Zulauf355e49b2020-04-24 15:11:15 -06003287 cb_context->RecordEndRenderPass(*rp_state, cb_context->NextCommandTag(command));
John Zulaufe5da6e52020-03-18 15:32:18 -06003288}
John Zulauf3d84f1b2020-03-09 13:33:25 -06003289
John Zulauf33fc1d52020-07-17 11:01:10 -06003290// Simple heuristic rule to detect WAW operations representing algorithmically safe or increment-style
3291// updates to a resource which do not conflict at the byte level.
3292// TODO: Revisit this rule to see if it needs to be tighter or looser
3293// TODO: Add programmatic control over suppression heuristics
3294bool SyncValidator::SupressedBoundDescriptorWAW(const HazardResult &hazard) const {
3295 return (hazard.hazard == WRITE_AFTER_WRITE) && (FlagBit(hazard.usage_index) == hazard.prior_access);
3296}
3297
John Zulauf3d84f1b2020-03-09 13:33:25 -06003298void SyncValidator::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
John Zulauf355e49b2020-04-24 15:11:15 -06003299 RecordCmdEndRenderPass(commandBuffer, nullptr, CMD_ENDRENDERPASS);
John Zulauf5a1a5382020-06-22 17:23:25 -06003300 StateTracker::PostCallRecordCmdEndRenderPass(commandBuffer);
John Zulauf3d84f1b2020-03-09 13:33:25 -06003301}
3302
3303void SyncValidator::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) {
John Zulauf355e49b2020-04-24 15:11:15 -06003304 RecordCmdEndRenderPass(commandBuffer, pSubpassEndInfo, CMD_ENDRENDERPASS2);
John Zulauf5a1a5382020-06-22 17:23:25 -06003305 StateTracker::PostCallRecordCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
John Zulauf3d84f1b2020-03-09 13:33:25 -06003306}
3307
3308void SyncValidator::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) {
John Zulauf355e49b2020-04-24 15:11:15 -06003309 RecordCmdEndRenderPass(commandBuffer, pSubpassEndInfo, CMD_ENDRENDERPASS2);
John Zulauf5a1a5382020-06-22 17:23:25 -06003310 StateTracker::PostCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
John Zulauf3d84f1b2020-03-09 13:33:25 -06003311}
locke-lunarga19c71d2020-03-02 18:17:04 -07003312
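// Shared template for vkCmdCopyBufferToImage and vkCmdCopyBufferToImage2KHR. For each region the buffer side is
// checked as a transfer read over a range derived from bufferOffset and the image extent/format, and the image side as
// a transfer write over the region's subresource.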
Jeff Leger178b1e52020-10-05 12:22:23 -04003313template <typename BufferImageCopyRegionType>
3314bool SyncValidator::ValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
3315 VkImageLayout dstImageLayout, uint32_t regionCount,
3316 const BufferImageCopyRegionType *pRegions, CopyCommandVersion version) const {
locke-lunarga19c71d2020-03-02 18:17:04 -07003317 bool skip = false;
3318 const auto *cb_access_context = GetAccessContext(commandBuffer);
3319 assert(cb_access_context);
3320 if (!cb_access_context) return skip;
3321
Jeff Leger178b1e52020-10-05 12:22:23 -04003322 const bool is_2khr = (version == COPY_COMMAND_VERSION_2);
3323 const char *func_name = is_2khr ? "vkCmdCopyBufferToImage2KHR()" : "vkCmdCopyBufferToImage()";
3324
locke-lunarga19c71d2020-03-02 18:17:04 -07003325 const auto *context = cb_access_context->GetCurrentAccessContext();
3326 assert(context);
3327 if (!context) return skip;
3328
3329 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
locke-lunarga19c71d2020-03-02 18:17:04 -07003330 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
3331
3332 for (uint32_t region = 0; region < regionCount; region++) {
3333 const auto &copy_region = pRegions[region];
John Zulauf16adfc92020-04-08 10:28:33 -06003334 if (src_buffer) {
John Zulauf355e49b2020-04-24 15:11:15 -06003335 ResourceAccessRange src_range =
3336 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, dst_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06003337 auto hazard = context->DetectHazard(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range);
locke-lunarga19c71d2020-03-02 18:17:04 -07003338 if (hazard.hazard) {
John Zulauf7635de32020-05-29 17:14:15 -06003339 // PHASE1 TODO -- add tag information to log msg when useful.
locke-lunarga0003652020-03-10 11:38:51 -06003340 skip |= LogError(srcBuffer, string_SyncHazardVUID(hazard.hazard),
Jeff Leger178b1e52020-10-05 12:22:23 -04003341 "%s: Hazard %s for srcBuffer %s, region %" PRIu32 ". Access info %s.", func_name,
John Zulauf1dae9192020-06-16 15:46:44 -06003342 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcBuffer).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06003343 string_UsageTag(hazard).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07003344 }
3345 }
3346 if (dst_image) {
John Zulauf540266b2020-04-06 18:54:53 -06003347 auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.imageSubresource,
locke-lunarga19c71d2020-03-02 18:17:04 -07003348 copy_region.imageOffset, copy_region.imageExtent);
3349 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06003350 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
Jeff Leger178b1e52020-10-05 12:22:23 -04003351 "%s: Hazard %s for dstImage %s, region %" PRIu32 ". Access info %s.", func_name,
John Zulauf1dae9192020-06-16 15:46:44 -06003352 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06003353 string_UsageTag(hazard).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07003354 }
3355 if (skip) break;
3356 }
3357 if (skip) break;
3358 }
3359 return skip;
3360}
3361
Jeff Leger178b1e52020-10-05 12:22:23 -04003362bool SyncValidator::PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
3363 VkImageLayout dstImageLayout, uint32_t regionCount,
3364 const VkBufferImageCopy *pRegions) const {
3365 return ValidateCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions,
3366 COPY_COMMAND_VERSION_1);
3367}
3368
3369bool SyncValidator::PreCallValidateCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
3370 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) const {
3371 return ValidateCmdCopyBufferToImage(commandBuffer, pCopyBufferToImageInfo->srcBuffer, pCopyBufferToImageInfo->dstImage,
3372 pCopyBufferToImageInfo->dstImageLayout, pCopyBufferToImageInfo->regionCount,
3373 pCopyBufferToImageInfo->pRegions, COPY_COMMAND_VERSION_2);
3374}
3375
3376template <typename BufferImageCopyRegionType>
3377void SyncValidator::RecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
3378 VkImageLayout dstImageLayout, uint32_t regionCount,
3379 const BufferImageCopyRegionType *pRegions, CopyCommandVersion version) {
locke-lunarga19c71d2020-03-02 18:17:04 -07003380 auto *cb_access_context = GetAccessContext(commandBuffer);
3381 assert(cb_access_context);
Jeff Leger178b1e52020-10-05 12:22:23 -04003382
3383 const bool is_2khr = (version == COPY_COMMAND_VERSION_2);
3384 const CMD_TYPE cmd_type = is_2khr ? CMD_COPYBUFFERTOIMAGE2KHR : CMD_COPYBUFFERTOIMAGE;
3385
3386 const auto tag = cb_access_context->NextCommandTag(cmd_type);
locke-lunarga19c71d2020-03-02 18:17:04 -07003387 auto *context = cb_access_context->GetCurrentAccessContext();
3388 assert(context);
3389
3390 const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
John Zulauf16adfc92020-04-08 10:28:33 -06003391 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07003392
3393 for (uint32_t region = 0; region < regionCount; region++) {
3394 const auto &copy_region = pRegions[region];
3395 if (src_buffer) {
John Zulauf355e49b2020-04-24 15:11:15 -06003396 ResourceAccessRange src_range =
3397 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, dst_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06003398 context->UpdateAccessState(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07003399 }
3400 if (dst_image) {
John Zulauf540266b2020-04-06 18:54:53 -06003401 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.imageSubresource,
John Zulauf5f13a792020-03-10 07:31:21 -06003402 copy_region.imageOffset, copy_region.imageExtent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07003403 }
3404 }
3405}
3406
Jeff Leger178b1e52020-10-05 12:22:23 -04003407void SyncValidator::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
3408 VkImageLayout dstImageLayout, uint32_t regionCount,
3409 const VkBufferImageCopy *pRegions) {
3410 StateTracker::PreCallRecordCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
3411 RecordCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions, COPY_COMMAND_VERSION_1);
3412}
3413
3414void SyncValidator::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
3415 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
3416 StateTracker::PreCallRecordCmdCopyBufferToImage2KHR(commandBuffer, pCopyBufferToImageInfo);
3417 RecordCmdCopyBufferToImage(commandBuffer, pCopyBufferToImageInfo->srcBuffer, pCopyBufferToImageInfo->dstImage,
3418 pCopyBufferToImageInfo->dstImageLayout, pCopyBufferToImageInfo->regionCount,
3419 pCopyBufferToImageInfo->pRegions, COPY_COMMAND_VERSION_2);
3420}
3421
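// Shared template for vkCmdCopyImageToBuffer and vkCmdCopyImageToBuffer2KHR: the mirror of the buffer-to-image path,
// reading the image and writing the destination buffer (buffer hazard checks are skipped for sparse buffers).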
3422template <typename BufferImageCopyRegionType>
3423bool SyncValidator::ValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3424 VkBuffer dstBuffer, uint32_t regionCount,
3425 const BufferImageCopyRegionType *pRegions, CopyCommandVersion version) const {
locke-lunarga19c71d2020-03-02 18:17:04 -07003426 bool skip = false;
3427 const auto *cb_access_context = GetAccessContext(commandBuffer);
3428 assert(cb_access_context);
3429 if (!cb_access_context) return skip;
3430
Jeff Leger178b1e52020-10-05 12:22:23 -04003431 const bool is_2khr = (version == COPY_COMMAND_VERSION_2);
3432 const char *func_name = is_2khr ? "vkCmdCopyImageToBuffer2KHR()" : "vkCmdCopyImageToBuffer()";
3433
locke-lunarga19c71d2020-03-02 18:17:04 -07003434 const auto *context = cb_access_context->GetCurrentAccessContext();
3435 assert(context);
3436 if (!context) return skip;
3437
3438 const auto *src_image = Get<IMAGE_STATE>(srcImage);
3439 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3440 const auto dst_mem = (dst_buffer && !dst_buffer->sparse) ? dst_buffer->binding.mem_state->mem : VK_NULL_HANDLE;
3441 for (uint32_t region = 0; region < regionCount; region++) {
3442 const auto &copy_region = pRegions[region];
3443 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06003444 auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.imageSubresource,
locke-lunarga19c71d2020-03-02 18:17:04 -07003445 copy_region.imageOffset, copy_region.imageExtent);
3446 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06003447 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
Jeff Leger178b1e52020-10-05 12:22:23 -04003448 "%s: Hazard %s for srcImage %s, region %" PRIu32 ". Access info %s.", func_name,
John Zulauf1dae9192020-06-16 15:46:44 -06003449 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06003450 string_UsageTag(hazard).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07003451 }
3452 }
3453 if (dst_mem) {
John Zulauf355e49b2020-04-24 15:11:15 -06003454 ResourceAccessRange dst_range =
3455 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, src_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06003456 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range);
locke-lunarga19c71d2020-03-02 18:17:04 -07003457 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06003458 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
Jeff Leger178b1e52020-10-05 12:22:23 -04003459 "%s: Hazard %s for dstBuffer %s, region %" PRIu32 ". Access info %s.", func_name,
John Zulauf1dae9192020-06-16 15:46:44 -06003460 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06003461 string_UsageTag(hazard).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07003462 }
3463 }
3464 if (skip) break;
3465 }
3466 return skip;
3467}
3468
Jeff Leger178b1e52020-10-05 12:22:23 -04003469bool SyncValidator::PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
3470 VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount,
3471 const VkBufferImageCopy *pRegions) const {
3472 return ValidateCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions,
3473 COPY_COMMAND_VERSION_1);
3474}
3475
3476bool SyncValidator::PreCallValidateCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
3477 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) const {
3478 return ValidateCmdCopyImageToBuffer(commandBuffer, pCopyImageToBufferInfo->srcImage, pCopyImageToBufferInfo->srcImageLayout,
3479 pCopyImageToBufferInfo->dstBuffer, pCopyImageToBufferInfo->regionCount,
3480 pCopyImageToBufferInfo->pRegions, COPY_COMMAND_VERSION_2);
3481}
3482
3483template <typename BufferImageCopyRegionType>
3484void SyncValidator::RecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3485 VkBuffer dstBuffer, uint32_t regionCount, const BufferImageCopyRegionType *pRegions,
3486 CopyCommandVersion version) {
locke-lunarga19c71d2020-03-02 18:17:04 -07003487 auto *cb_access_context = GetAccessContext(commandBuffer);
3488 assert(cb_access_context);
Jeff Leger178b1e52020-10-05 12:22:23 -04003489
3490 const bool is_2khr = (version == COPY_COMMAND_VERSION_2);
3491 const CMD_TYPE cmd_type = is_2khr ? CMD_COPYIMAGETOBUFFER2KHR : CMD_COPYIMAGETOBUFFER;
3492
3493 const auto tag = cb_access_context->NextCommandTag(cmd_type);
locke-lunarga19c71d2020-03-02 18:17:04 -07003494 auto *context = cb_access_context->GetCurrentAccessContext();
3495 assert(context);
3496
3497 const auto *src_image = Get<IMAGE_STATE>(srcImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07003498 auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
3499 const auto dst_mem = (dst_buffer && !dst_buffer->sparse) ? dst_buffer->binding.mem_state->mem : VK_NULL_HANDLE;
John Zulauf5f13a792020-03-10 07:31:21 -06003500 const VulkanTypedHandle dst_handle(dst_mem, kVulkanObjectTypeDeviceMemory);
locke-lunarga19c71d2020-03-02 18:17:04 -07003501
3502 for (uint32_t region = 0; region < regionCount; region++) {
3503 const auto &copy_region = pRegions[region];
3504 if (src_image) {
John Zulauf540266b2020-04-06 18:54:53 -06003505 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.imageSubresource,
John Zulauf5f13a792020-03-10 07:31:21 -06003506 copy_region.imageOffset, copy_region.imageExtent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07003507 }
3508 if (dst_buffer) {
John Zulauf355e49b2020-04-24 15:11:15 -06003509 ResourceAccessRange dst_range =
3510 MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, src_image->createInfo.format));
John Zulauf16adfc92020-04-08 10:28:33 -06003511 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07003512 }
3513 }
3514}
3515
Jeff Leger178b1e52020-10-05 12:22:23 -04003516void SyncValidator::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3517 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
3518 StateTracker::PreCallRecordCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
3519 RecordCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions, COPY_COMMAND_VERSION_1);
3520}
3521
3522void SyncValidator::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
3523 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
3524 StateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(commandBuffer, pCopyImageToBufferInfo);
3525 RecordCmdCopyImageToBuffer(commandBuffer, pCopyImageToBufferInfo->srcImage, pCopyImageToBufferInfo->srcImageLayout,
3526 pCopyImageToBufferInfo->dstBuffer, pCopyImageToBufferInfo->regionCount,
3527 pCopyImageToBufferInfo->pRegions, COPY_COMMAND_VERSION_2);
3528}
3529
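// Shared template for vkCmdBlitImage and vkCmdBlitImage2KHR. Blit offsets may be reversed for mirrored blits, so the
// minimum corner and absolute extent are computed for each region before hazard detection.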
3530template <typename RegionType>
3531bool SyncValidator::ValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3532 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
3533 const RegionType *pRegions, VkFilter filter, const char *apiName) const {
locke-lunarga19c71d2020-03-02 18:17:04 -07003534 bool skip = false;
3535 const auto *cb_access_context = GetAccessContext(commandBuffer);
3536 assert(cb_access_context);
3537 if (!cb_access_context) return skip;
3538
3539 const auto *context = cb_access_context->GetCurrentAccessContext();
3540 assert(context);
3541 if (!context) return skip;
3542
3543 const auto *src_image = Get<IMAGE_STATE>(srcImage);
3544 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
3545
3546 for (uint32_t region = 0; region < regionCount; region++) {
3547 const auto &blit_region = pRegions[region];
3548 if (src_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06003549 VkOffset3D offset = {std::min(blit_region.srcOffsets[0].x, blit_region.srcOffsets[1].x),
3550 std::min(blit_region.srcOffsets[0].y, blit_region.srcOffsets[1].y),
3551 std::min(blit_region.srcOffsets[0].z, blit_region.srcOffsets[1].z)};
3552 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.srcOffsets[1].x - blit_region.srcOffsets[0].x)),
3553 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].y - blit_region.srcOffsets[0].y)),
3554 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].z - blit_region.srcOffsets[0].z))};
3555 auto hazard =
3556 context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, blit_region.srcSubresource, offset, extent);
locke-lunarga19c71d2020-03-02 18:17:04 -07003557 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06003558 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
Jeff Leger178b1e52020-10-05 12:22:23 -04003559 "%s: Hazard %s for srcImage %s, region %" PRIu32 ". Access info %s.", apiName,
John Zulauf1dae9192020-06-16 15:46:44 -06003560 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06003561 string_UsageTag(hazard).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07003562 }
3563 }
3564
3565 if (dst_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06003566 VkOffset3D offset = {std::min(blit_region.dstOffsets[0].x, blit_region.dstOffsets[1].x),
3567 std::min(blit_region.dstOffsets[0].y, blit_region.dstOffsets[1].y),
3568 std::min(blit_region.dstOffsets[0].z, blit_region.dstOffsets[1].z)};
3569 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.dstOffsets[1].x - blit_region.dstOffsets[0].x)),
3570 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].y - blit_region.dstOffsets[0].y)),
3571 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].z - blit_region.dstOffsets[0].z))};
3572 auto hazard =
3573 context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, blit_region.dstSubresource, offset, extent);
locke-lunarga19c71d2020-03-02 18:17:04 -07003574 if (hazard.hazard) {
locke-lunarga0003652020-03-10 11:38:51 -06003575 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
Jeff Leger178b1e52020-10-05 12:22:23 -04003576 "%s: Hazard %s for dstImage %s, region %" PRIu32 ". Access info %s.", apiName,
John Zulauf1dae9192020-06-16 15:46:44 -06003577 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06003578 string_UsageTag(hazard).c_str());
locke-lunarga19c71d2020-03-02 18:17:04 -07003579 }
3580 if (skip) break;
3581 }
3582 }
3583
3584 return skip;
3585}
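// Illustrative sketch (hypothetical values, not code from this layer): a mirrored blit region such as
//     VkImageBlit blit{};
//     blit.srcOffsets[0] = {width, 0, 0};
//     blit.srcOffsets[1] = {0, height, 1};
// is valid API usage; the normalization in ValidateCmdBlitImage/RecordCmdBlitImage treats it as offset
// {0, 0, 0} with extent {width, height, 1} for access tracking.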
3586
Jeff Leger178b1e52020-10-05 12:22:23 -04003587bool SyncValidator::PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3588 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
3589 const VkImageBlit *pRegions, VkFilter filter) const {
3590 return ValidateCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter,
3591 "vkCmdBlitImage");
3592}
3593
3594bool SyncValidator::PreCallValidateCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
3595 const VkBlitImageInfo2KHR *pBlitImageInfo) const {
3596 return ValidateCmdBlitImage(commandBuffer, pBlitImageInfo->srcImage, pBlitImageInfo->srcImageLayout, pBlitImageInfo->dstImage,
3597 pBlitImageInfo->dstImageLayout, pBlitImageInfo->regionCount, pBlitImageInfo->pRegions,
3598 pBlitImageInfo->filter, "vkCmdBlitImage2KHR");
3599}
3600
3601template <typename RegionType>
3602void SyncValidator::RecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3603 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
3604 const RegionType *pRegions, VkFilter filter, ResourceUsageTag tag) {
locke-lunarga19c71d2020-03-02 18:17:04 -07003605 auto *cb_access_context = GetAccessContext(commandBuffer);
3606 assert(cb_access_context);
3607 auto *context = cb_access_context->GetCurrentAccessContext();
3608 assert(context);
3609
3610 auto *src_image = Get<IMAGE_STATE>(srcImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07003611 auto *dst_image = Get<IMAGE_STATE>(dstImage);
locke-lunarga19c71d2020-03-02 18:17:04 -07003612
3613 for (uint32_t region = 0; region < regionCount; region++) {
3614 const auto &blit_region = pRegions[region];
3615 if (src_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06003616 VkOffset3D offset = {std::min(blit_region.srcOffsets[0].x, blit_region.srcOffsets[1].x),
3617 std::min(blit_region.srcOffsets[0].y, blit_region.srcOffsets[1].y),
3618 std::min(blit_region.srcOffsets[0].z, blit_region.srcOffsets[1].z)};
3619 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.srcOffsets[1].x - blit_region.srcOffsets[0].x)),
3620 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].y - blit_region.srcOffsets[0].y)),
3621 static_cast<uint32_t>(abs(blit_region.srcOffsets[1].z - blit_region.srcOffsets[0].z))};
3622 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, blit_region.srcSubresource, offset, extent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07003623 }
3624 if (dst_image) {
locke-lunarg8f93acc2020-06-18 21:26:46 -06003625 VkOffset3D offset = {std::min(blit_region.dstOffsets[0].x, blit_region.dstOffsets[1].x),
3626 std::min(blit_region.dstOffsets[0].y, blit_region.dstOffsets[1].y),
3627 std::min(blit_region.dstOffsets[0].z, blit_region.dstOffsets[1].z)};
3628 VkExtent3D extent = {static_cast<uint32_t>(abs(blit_region.dstOffsets[1].x - blit_region.dstOffsets[0].x)),
3629 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].y - blit_region.dstOffsets[0].y)),
3630 static_cast<uint32_t>(abs(blit_region.dstOffsets[1].z - blit_region.dstOffsets[0].z))};
3631 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, blit_region.dstSubresource, offset, extent, tag);
locke-lunarga19c71d2020-03-02 18:17:04 -07003632 }
3633 }
3634}
locke-lunarg36ba2592020-04-03 09:42:04 -06003635
Jeff Leger178b1e52020-10-05 12:22:23 -04003636void SyncValidator::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3637 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
3638 const VkImageBlit *pRegions, VkFilter filter) {
3639 auto *cb_access_context = GetAccessContext(commandBuffer);
3640 assert(cb_access_context);
3641 const auto tag = cb_access_context->NextCommandTag(CMD_BLITIMAGE);
3642 StateTracker::PreCallRecordCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount,
3643 pRegions, filter);
3644 RecordCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter, tag);
3645}
3646
3647void SyncValidator::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) {
3648 StateTracker::PreCallRecordCmdBlitImage2KHR(commandBuffer, pBlitImageInfo);
3649 auto *cb_access_context = GetAccessContext(commandBuffer);
3650 assert(cb_access_context);
3651 const auto tag = cb_access_context->NextCommandTag(CMD_BLITIMAGE2KHR);
3652 RecordCmdBlitImage(commandBuffer, pBlitImageInfo->srcImage, pBlitImageInfo->srcImageLayout, pBlitImageInfo->dstImage,
3653 pBlitImageInfo->dstImageLayout, pBlitImageInfo->regionCount, pBlitImageInfo->pRegions,
3654 pBlitImageInfo->filter, tag);
3655}
3656
locke-lunarg61870c22020-06-09 14:51:50 -06003657bool SyncValidator::ValidateIndirectBuffer(const AccessContext &context, VkCommandBuffer commandBuffer,
3658 const VkDeviceSize struct_size, const VkBuffer buffer, const VkDeviceSize offset,
3659 const uint32_t drawCount, const uint32_t stride, const char *function) const {
locke-lunargff255f92020-05-13 18:53:52 -06003660 bool skip = false;
3661 if (drawCount == 0) return skip;
3662
3663 const auto *buf_state = Get<BUFFER_STATE>(buffer);
3664 VkDeviceSize size = struct_size;
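    // When the indirect commands are tightly packed (stride == sizeof(struct)), the reads are coalesced
    // into a single contiguous range; otherwise each stride-separated struct is checked individually.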
3665 if (drawCount == 1 || stride == size) {
3666 if (drawCount > 1) size *= drawCount;
John Zulauf3e86bf02020-09-12 10:47:57 -06003667 const ResourceAccessRange range = MakeRange(offset, size);
locke-lunargff255f92020-05-13 18:53:52 -06003668 auto hazard = context.DetectHazard(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range);
3669 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003670 skip |= LogError(buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06003671 "%s: Hazard %s for indirect %s in %s. Access info %s.", function, string_SyncHazard(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06003672 report_data->FormatHandle(buffer).c_str(), report_data->FormatHandle(commandBuffer).c_str(),
John Zulauf37ceaed2020-07-03 16:18:15 -06003673 string_UsageTag(hazard).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06003674 }
3675 } else {
3676 for (uint32_t i = 0; i < drawCount; ++i) {
John Zulauf3e86bf02020-09-12 10:47:57 -06003677 const ResourceAccessRange range = MakeRange(offset + i * stride, size);
locke-lunargff255f92020-05-13 18:53:52 -06003678 auto hazard = context.DetectHazard(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range);
3679 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003680 skip |= LogError(buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06003681 "%s: Hazard %s for indirect %s in %s. Access info %s.", function, string_SyncHazard(hazard.hazard),
3682 report_data->FormatHandle(buffer).c_str(), report_data->FormatHandle(commandBuffer).c_str(),
3683 string_UsageTag(hazard).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06003684 break;
3685 }
3686 }
3687 }
3688 return skip;
3689}
3690
locke-lunarg61870c22020-06-09 14:51:50 -06003691void SyncValidator::RecordIndirectBuffer(AccessContext &context, const ResourceUsageTag &tag, const VkDeviceSize struct_size,
3692 const VkBuffer buffer, const VkDeviceSize offset, const uint32_t drawCount,
3693 uint32_t stride) {
locke-lunargff255f92020-05-13 18:53:52 -06003694 const auto *buf_state = Get<BUFFER_STATE>(buffer);
3695 VkDeviceSize size = struct_size;
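    // Mirrors ValidateIndirectBuffer: one coalesced range for tightly packed commands, otherwise one
    // range per stride-separated struct.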
3696 if (drawCount == 1 || stride == size) {
3697 if (drawCount > 1) size *= drawCount;
John Zulauf3e86bf02020-09-12 10:47:57 -06003698 const ResourceAccessRange range = MakeRange(offset, size);
locke-lunargff255f92020-05-13 18:53:52 -06003699 context.UpdateAccessState(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range, tag);
3700 } else {
3701 for (uint32_t i = 0; i < drawCount; ++i) {
John Zulauf3e86bf02020-09-12 10:47:57 -06003702 const ResourceAccessRange range = MakeRange(offset + i * stride, size);
locke-lunargff255f92020-05-13 18:53:52 -06003703 context.UpdateAccessState(*buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range, tag);
3704 }
3705 }
3706}
3707
locke-lunarg61870c22020-06-09 14:51:50 -06003708bool SyncValidator::ValidateCountBuffer(const AccessContext &context, VkCommandBuffer commandBuffer, VkBuffer buffer,
3709 VkDeviceSize offset, const char *function) const {
locke-lunargff255f92020-05-13 18:53:52 -06003710 bool skip = false;
3711
3712 const auto *count_buf_state = Get<BUFFER_STATE>(buffer);
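    // The draw count parameter is a single uint32_t, so only a 4-byte range at 'offset' is read.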
John Zulauf3e86bf02020-09-12 10:47:57 -06003713 const ResourceAccessRange range = MakeRange(offset, 4);
locke-lunargff255f92020-05-13 18:53:52 -06003714 auto hazard = context.DetectHazard(*count_buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range);
3715 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06003716 skip |= LogError(count_buf_state->buffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06003717 "%s: Hazard %s for countBuffer %s in %s. Access info %s.", function, string_SyncHazard(hazard.hazard),
John Zulauf1dae9192020-06-16 15:46:44 -06003718 report_data->FormatHandle(buffer).c_str(), report_data->FormatHandle(commandBuffer).c_str(),
John Zulauf37ceaed2020-07-03 16:18:15 -06003719 string_UsageTag(hazard).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06003720 }
3721 return skip;
3722}
3723
locke-lunarg61870c22020-06-09 14:51:50 -06003724void SyncValidator::RecordCountBuffer(AccessContext &context, const ResourceUsageTag &tag, VkBuffer buffer, VkDeviceSize offset) {
locke-lunargff255f92020-05-13 18:53:52 -06003725 const auto *count_buf_state = Get<BUFFER_STATE>(buffer);
John Zulauf3e86bf02020-09-12 10:47:57 -06003726 const ResourceAccessRange range = MakeRange(offset, 4);
locke-lunargff255f92020-05-13 18:53:52 -06003727 context.UpdateAccessState(*count_buf_state, SYNC_DRAW_INDIRECT_INDIRECT_COMMAND_READ, range, tag);
3728}
3729
locke-lunarg36ba2592020-04-03 09:42:04 -06003730bool SyncValidator::PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const {
locke-lunargff255f92020-05-13 18:53:52 -06003731 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003732 const auto *cb_access_context = GetAccessContext(commandBuffer);
3733 assert(cb_access_context);
3734 if (!cb_access_context) return skip;
3735
locke-lunarg61870c22020-06-09 14:51:50 -06003736 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch");
locke-lunargff255f92020-05-13 18:53:52 -06003737 return skip;
locke-lunarg36ba2592020-04-03 09:42:04 -06003738}
3739
3740void SyncValidator::PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003741 StateTracker::PreCallRecordCmdDispatch(commandBuffer, x, y, z);
locke-lunargff255f92020-05-13 18:53:52 -06003742 auto *cb_access_context = GetAccessContext(commandBuffer);
3743 assert(cb_access_context);
3744 const auto tag = cb_access_context->NextCommandTag(CMD_DISPATCH);
locke-lunargff255f92020-05-13 18:53:52 -06003745
locke-lunarg61870c22020-06-09 14:51:50 -06003746 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, tag);
locke-lunarg36ba2592020-04-03 09:42:04 -06003747}
locke-lunarge1a67022020-04-29 00:15:36 -06003748
3749bool SyncValidator::PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const {
locke-lunargff255f92020-05-13 18:53:52 -06003750 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003751 const auto *cb_access_context = GetAccessContext(commandBuffer);
3752 assert(cb_access_context);
3753 if (!cb_access_context) return skip;
3754
3755 const auto *context = cb_access_context->GetCurrentAccessContext();
3756 assert(context);
3757 if (!context) return skip;
3758
locke-lunarg61870c22020-06-09 14:51:50 -06003759 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect");
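    // vkCmdDispatchIndirect consumes exactly one VkDispatchIndirectCommand, hence a count of 1 and a
    // stride equal to the struct size below.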
3760 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDispatchIndirectCommand), buffer, offset, 1,
3761 sizeof(VkDispatchIndirectCommand), "vkCmdDispatchIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003762 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003763}
3764
3765void SyncValidator::PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003766 StateTracker::PreCallRecordCmdDispatchIndirect(commandBuffer, buffer, offset);
locke-lunargff255f92020-05-13 18:53:52 -06003767 auto *cb_access_context = GetAccessContext(commandBuffer);
3768 assert(cb_access_context);
3769 const auto tag = cb_access_context->NextCommandTag(CMD_DISPATCHINDIRECT);
3770 auto *context = cb_access_context->GetCurrentAccessContext();
3771 assert(context);
3772
locke-lunarg61870c22020-06-09 14:51:50 -06003773 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_COMPUTE, tag);
3774 RecordIndirectBuffer(*context, tag, sizeof(VkDispatchIndirectCommand), buffer, offset, 1, sizeof(VkDispatchIndirectCommand));
locke-lunarge1a67022020-04-29 00:15:36 -06003775}
3776
3777bool SyncValidator::PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
3778 uint32_t firstVertex, uint32_t firstInstance) const {
locke-lunarga4d39ea2020-05-22 14:17:29 -06003779 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003780 const auto *cb_access_context = GetAccessContext(commandBuffer);
3781 assert(cb_access_context);
3782 if (!cb_access_context) return skip;
3783
locke-lunarg61870c22020-06-09 14:51:50 -06003784 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw");
3785 skip |= cb_access_context->ValidateDrawVertex(vertexCount, firstVertex, "vkCmdDraw");
3786 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDraw");
locke-lunarga4d39ea2020-05-22 14:17:29 -06003787 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003788}
3789
3790void SyncValidator::PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
3791 uint32_t firstVertex, uint32_t firstInstance) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003792 StateTracker::PreCallRecordCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
locke-lunargff255f92020-05-13 18:53:52 -06003793 auto *cb_access_context = GetAccessContext(commandBuffer);
3794 assert(cb_access_context);
3795 const auto tag = cb_access_context->NextCommandTag(CMD_DRAW);
locke-lunargff255f92020-05-13 18:53:52 -06003796
locke-lunarg61870c22020-06-09 14:51:50 -06003797 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3798 cb_access_context->RecordDrawVertex(vertexCount, firstVertex, tag);
3799 cb_access_context->RecordDrawSubpassAttachment(tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003800}
3801
3802bool SyncValidator::PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
3803 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const {
locke-lunarga4d39ea2020-05-22 14:17:29 -06003804 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003805 const auto *cb_access_context = GetAccessContext(commandBuffer);
3806 assert(cb_access_context);
3807 if (!cb_access_context) return skip;
3808
locke-lunarg61870c22020-06-09 14:51:50 -06003809 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed");
3810 skip |= cb_access_context->ValidateDrawVertexIndex(indexCount, firstIndex, "vkCmdDrawIndexed");
3811 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDrawIndexed");
locke-lunarga4d39ea2020-05-22 14:17:29 -06003812 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003813}
3814
3815void SyncValidator::PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
3816 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003817 StateTracker::PreCallRecordCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
locke-lunargff255f92020-05-13 18:53:52 -06003818 auto *cb_access_context = GetAccessContext(commandBuffer);
3819 assert(cb_access_context);
3820 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDEXED);
locke-lunargff255f92020-05-13 18:53:52 -06003821
locke-lunarg61870c22020-06-09 14:51:50 -06003822 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3823 cb_access_context->RecordDrawVertexIndex(indexCount, firstIndex, tag);
3824 cb_access_context->RecordDrawSubpassAttachment(tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003825}
3826
3827bool SyncValidator::PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3828 uint32_t drawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003829 bool skip = false;
3830 if (drawCount == 0) return skip;
3831
locke-lunargff255f92020-05-13 18:53:52 -06003832 const auto *cb_access_context = GetAccessContext(commandBuffer);
3833 assert(cb_access_context);
3834 if (!cb_access_context) return skip;
3835
3836 const auto *context = cb_access_context->GetCurrentAccessContext();
3837 assert(context);
3838 if (!context) return skip;
3839
locke-lunarg61870c22020-06-09 14:51:50 -06003840 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect");
3841 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDrawIndirect");
3842 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndirectCommand), buffer, offset, drawCount, stride,
3843 "vkCmdDrawIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003844
3845 // TODO: For now, we validate the whole vertex buffer, which may cause false positives.
3846 // The VkDrawIndirectCommand buffer contents can still change before SubmitQueue.
3847 // We will validate the vertex buffer in SubmitQueue in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003848 skip |= cb_access_context->ValidateDrawVertex(UINT32_MAX, 0, "vkCmdDrawIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003849 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003850}
3851
3852void SyncValidator::PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3853 uint32_t drawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003854 StateTracker::PreCallRecordCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003855 if (drawCount == 0) return;
locke-lunargff255f92020-05-13 18:53:52 -06003856 auto *cb_access_context = GetAccessContext(commandBuffer);
3857 assert(cb_access_context);
3858 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDIRECT);
3859 auto *context = cb_access_context->GetCurrentAccessContext();
3860 assert(context);
3861
locke-lunarg61870c22020-06-09 14:51:50 -06003862 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3863 cb_access_context->RecordDrawSubpassAttachment(tag);
3864 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndirectCommand), buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003865
3866 // TODO: For now, we record the whole vertex buffer, which may cause false positives.
3867 // The VkDrawIndirectCommand buffer contents can still change before SubmitQueue.
3868 // We will record the vertex buffer in SubmitQueue in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003869 cb_access_context->RecordDrawVertex(UINT32_MAX, 0, tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003870}
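// Illustrative application-side sketch (hypothetical handles, not code from this layer): writing the
// indirect buffer with a transfer and then drawing without a barrier is the kind of sequence the checks
// above report as a hazard on the indirect-command read.
//
//     vkCmdUpdateBuffer(cmd, indirect_buf, 0, sizeof(VkDrawIndirectCommand), &draw_args);
//     // missing vkCmdPipelineBarrier: TRANSFER write -> DRAW_INDIRECT / INDIRECT_COMMAND_READ
//     vkCmdDrawIndirect(cmd, indirect_buf, 0, 1, sizeof(VkDrawIndirectCommand));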
3871
3872bool SyncValidator::PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3873 uint32_t drawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003874 bool skip = false;
3875 if (drawCount == 0) return skip;
locke-lunargff255f92020-05-13 18:53:52 -06003876 const auto *cb_access_context = GetAccessContext(commandBuffer);
3877 assert(cb_access_context);
3878 if (!cb_access_context) return skip;
3879
3880 const auto *context = cb_access_context->GetCurrentAccessContext();
3881 assert(context);
3882 if (!context) return skip;
3883
locke-lunarg61870c22020-06-09 14:51:50 -06003884 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect");
3885 skip |= cb_access_context->ValidateDrawSubpassAttachment("vkCmdDrawIndexedIndirect");
3886 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, drawCount, stride,
3887 "vkCmdDrawIndexedIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003888
3889 // TODO: For now, we validate the whole index and vertex buffer, which may cause false positives.
3890 // The VkDrawIndexedIndirectCommand buffer contents can still change before SubmitQueue.
3891 // We will validate the index and vertex buffer in SubmitQueue in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003892 skip |= cb_access_context->ValidateDrawVertexIndex(UINT32_MAX, 0, "vkCmdDrawIndexedIndirect");
locke-lunargff255f92020-05-13 18:53:52 -06003893 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003894}
3895
3896void SyncValidator::PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3897 uint32_t drawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003898 StateTracker::PreCallRecordCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003899 auto *cb_access_context = GetAccessContext(commandBuffer);
3900 assert(cb_access_context);
3901 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDEXEDINDIRECT);
3902 auto *context = cb_access_context->GetCurrentAccessContext();
3903 assert(context);
3904
locke-lunarg61870c22020-06-09 14:51:50 -06003905 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3906 cb_access_context->RecordDrawSubpassAttachment(tag);
3907 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, drawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06003908
3909 // TODO: For now, we record the whole index and vertex buffer, which may cause false positives.
3910 // The VkDrawIndexedIndirectCommand buffer contents can still change before SubmitQueue.
3911 // We will record the index and vertex buffer in SubmitQueue in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003912 cb_access_context->RecordDrawVertexIndex(UINT32_MAX, 0, tag);
locke-lunargff255f92020-05-13 18:53:52 -06003913}
3914
3915bool SyncValidator::ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3916 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3917 uint32_t stride, const char *function) const {
3918 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06003919 const auto *cb_access_context = GetAccessContext(commandBuffer);
3920 assert(cb_access_context);
3921 if (!cb_access_context) return skip;
3922
3923 const auto *context = cb_access_context->GetCurrentAccessContext();
3924 assert(context);
3925 if (!context) return skip;
3926
locke-lunarg61870c22020-06-09 14:51:50 -06003927 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, function);
3928 skip |= cb_access_context->ValidateDrawSubpassAttachment(function);
3929 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndirectCommand), buffer, offset, maxDrawCount, stride,
3930 function);
3931 skip |= ValidateCountBuffer(*context, commandBuffer, countBuffer, countBufferOffset, function);
locke-lunargff255f92020-05-13 18:53:52 -06003932
3933 // TODO: For now, we validate the whole vertex buffer, which may cause false positives.
3934 // The VkDrawIndirectCommand buffer contents can still change before SubmitQueue.
3935 // We will validate the vertex buffer in SubmitQueue in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003936 skip |= cb_access_context->ValidateDrawVertex(UINT32_MAX, 0, function);
locke-lunargff255f92020-05-13 18:53:52 -06003937 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06003938}
3939
3940bool SyncValidator::PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3941 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3942 uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003943 return ValidateCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3944 "vkCmdDrawIndirectCount");
locke-lunarge1a67022020-04-29 00:15:36 -06003945}
3946
3947void SyncValidator::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3948 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3949 uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003950 StateTracker::PreCallRecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount,
3951 stride);
locke-lunargff255f92020-05-13 18:53:52 -06003952 auto *cb_access_context = GetAccessContext(commandBuffer);
3953 assert(cb_access_context);
3954 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDIRECTCOUNT);
3955 auto *context = cb_access_context->GetCurrentAccessContext();
3956 assert(context);
3957
locke-lunarg61870c22020-06-09 14:51:50 -06003958 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
3959 cb_access_context->RecordDrawSubpassAttachment(tag);
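    // Only the first VkDrawIndirectCommand slot is recorded here (count of 1), presumably because the
    // actual draw count lives in the GPU-side count buffer and is unknown at record time.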
3960 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndirectCommand), buffer, offset, 1, stride);
3961 RecordCountBuffer(*context, tag, countBuffer, countBufferOffset);
locke-lunargff255f92020-05-13 18:53:52 -06003962
3963 // TODO: For now, we record the whole vertex buffer, which may cause false positives.
3964 // The VkDrawIndirectCommand buffer contents can still change before SubmitQueue.
3965 // We will record the vertex buffer in SubmitQueue in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06003966 cb_access_context->RecordDrawVertex(UINT32_MAX, 0, tag);
locke-lunarge1a67022020-04-29 00:15:36 -06003967}
3968
3969bool SyncValidator::PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3970 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3971 uint32_t maxDrawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003972 return ValidateCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3973 "vkCmdDrawIndirectCountKHR");
locke-lunarge1a67022020-04-29 00:15:36 -06003974}
3975
3976void SyncValidator::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3977 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3978 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003979 StateTracker::PreCallRecordCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount,
3980 stride);
locke-lunargff255f92020-05-13 18:53:52 -06003981 PreCallRecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
locke-lunarge1a67022020-04-29 00:15:36 -06003982}
3983
3984bool SyncValidator::PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3985 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3986 uint32_t maxDrawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06003987 return ValidateCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
3988 "vkCmdDrawIndirectCountAMD");
locke-lunarge1a67022020-04-29 00:15:36 -06003989}
3990
3991void SyncValidator::PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3992 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3993 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06003994 StateTracker::PreCallRecordCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount,
3995 stride);
locke-lunargff255f92020-05-13 18:53:52 -06003996 PreCallRecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
3997}
3998
3999bool SyncValidator::ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
4000 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
4001 uint32_t stride, const char *function) const {
4002 bool skip = false;
locke-lunargff255f92020-05-13 18:53:52 -06004003 const auto *cb_access_context = GetAccessContext(commandBuffer);
4004 assert(cb_access_context);
4005 if (!cb_access_context) return skip;
4006
4007 const auto *context = cb_access_context->GetCurrentAccessContext();
4008 assert(context);
4009 if (!context) return skip;
4010
locke-lunarg61870c22020-06-09 14:51:50 -06004011 skip |= cb_access_context->ValidateDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, function);
4012 skip |= cb_access_context->ValidateDrawSubpassAttachment(function);
4013 skip |= ValidateIndirectBuffer(*context, commandBuffer, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, maxDrawCount,
4014 stride, function);
4015 skip |= ValidateCountBuffer(*context, commandBuffer, countBuffer, countBufferOffset, function);
locke-lunargff255f92020-05-13 18:53:52 -06004016
4017 // TODO: For now, we validate the whole index and vertex buffer, which may cause false positives.
4018 // The VkDrawIndexedIndirectCommand buffer contents can still change before SubmitQueue.
4019 // We will validate the index and vertex buffer in SubmitQueue in the future.
locke-lunarg61870c22020-06-09 14:51:50 -06004020 skip |= cb_access_context->ValidateDrawVertexIndex(UINT32_MAX, 0, function);
locke-lunargff255f92020-05-13 18:53:52 -06004021 return skip;
locke-lunarge1a67022020-04-29 00:15:36 -06004022}
4023
4024bool SyncValidator::PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
4025 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
4026 uint32_t maxDrawCount, uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06004027 return ValidateCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
4028 "vkCmdDrawIndexedIndirectCount");
locke-lunarge1a67022020-04-29 00:15:36 -06004029}
4030
4031void SyncValidator::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
4032 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
4033 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06004034 StateTracker::PreCallRecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
4035 maxDrawCount, stride);
locke-lunargff255f92020-05-13 18:53:52 -06004036 auto *cb_access_context = GetAccessContext(commandBuffer);
4037 assert(cb_access_context);
4038 const auto tag = cb_access_context->NextCommandTag(CMD_DRAWINDEXEDINDIRECTCOUNT);
4039 auto *context = cb_access_context->GetCurrentAccessContext();
4040 assert(context);
4041
locke-lunarg61870c22020-06-09 14:51:50 -06004042 cb_access_context->RecordDispatchDrawDescriptorSet(VK_PIPELINE_BIND_POINT_GRAPHICS, tag);
4043 cb_access_context->RecordDrawSubpassAttachment(tag);
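    // As with vkCmdDrawIndirectCount, only the first indirect struct is recorded; the real count is
    // GPU-resident.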
4044 RecordIndirectBuffer(*context, tag, sizeof(VkDrawIndexedIndirectCommand), buffer, offset, 1, stride);
4045 RecordCountBuffer(*context, tag, countBuffer, countBufferOffset);
locke-lunargff255f92020-05-13 18:53:52 -06004046
4047 // TODO: For now, we record the whole index and vertex buffer, which may cause false positives.
4048 // The VkDrawIndexedIndirectCommand buffer contents can still change before SubmitQueue.
locke-lunarg61870c22020-06-09 14:51:50 -06004049 // We will record the index and vertex buffer in SubmitQueue in the future.
4050 cb_access_context->RecordDrawVertexIndex(UINT32_MAX, 0, tag);
locke-lunarge1a67022020-04-29 00:15:36 -06004051}
4052
4053bool SyncValidator::PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
4054 VkDeviceSize offset, VkBuffer countBuffer,
4055 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
4056 uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06004057 return ValidateCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
4058 "vkCmdDrawIndexedIndirectCountKHR");
locke-lunarge1a67022020-04-29 00:15:36 -06004059}
4060
4061void SyncValidator::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
4062 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
4063 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06004064 StateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
4065 maxDrawCount, stride);
locke-lunarge1a67022020-04-29 00:15:36 -06004066 PreCallRecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
4067}
4068
4069bool SyncValidator::PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer,
4070 VkDeviceSize offset, VkBuffer countBuffer,
4071 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
4072 uint32_t stride) const {
locke-lunargff255f92020-05-13 18:53:52 -06004073 return ValidateCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
4074 "vkCmdDrawIndexedIndirectCountAMD");
locke-lunarge1a67022020-04-29 00:15:36 -06004075}
4076
4077void SyncValidator::PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
4078 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
4079 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg8ec19162020-06-16 18:48:34 -06004080 StateTracker::PreCallRecordCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
4081 maxDrawCount, stride);
locke-lunarge1a67022020-04-29 00:15:36 -06004082 PreCallRecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
4083}
4084
4085bool SyncValidator::PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
4086 const VkClearColorValue *pColor, uint32_t rangeCount,
4087 const VkImageSubresourceRange *pRanges) const {
4088 bool skip = false;
4089 const auto *cb_access_context = GetAccessContext(commandBuffer);
4090 assert(cb_access_context);
4091 if (!cb_access_context) return skip;
4092
4093 const auto *context = cb_access_context->GetCurrentAccessContext();
4094 assert(context);
4095 if (!context) return skip;
4096
4097 const auto *image_state = Get<IMAGE_STATE>(image);
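    // Each requested subresource range is treated as a transfer write covering the full image extent.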
4098
4099 for (uint32_t index = 0; index < rangeCount; index++) {
4100 const auto &range = pRanges[index];
4101 if (image_state) {
4102 auto hazard =
4103 context->DetectHazard(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent);
4104 if (hazard.hazard) {
4105 skip |= LogError(image, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06004106 "vkCmdClearColorImage: Hazard %s for %s, range index %" PRIu32 ". Access info %s.",
John Zulauf1dae9192020-06-16 15:46:44 -06004107 string_SyncHazard(hazard.hazard), report_data->FormatHandle(image).c_str(), index,
John Zulauf37ceaed2020-07-03 16:18:15 -06004108 string_UsageTag(hazard).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06004109 }
4110 }
4111 }
4112 return skip;
4113}
4114
4115void SyncValidator::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
4116 const VkClearColorValue *pColor, uint32_t rangeCount,
4117 const VkImageSubresourceRange *pRanges) {
locke-lunarg8ec19162020-06-16 18:48:34 -06004118 StateTracker::PreCallRecordCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
locke-lunarge1a67022020-04-29 00:15:36 -06004119 auto *cb_access_context = GetAccessContext(commandBuffer);
4120 assert(cb_access_context);
4121 const auto tag = cb_access_context->NextCommandTag(CMD_CLEARCOLORIMAGE);
4122 auto *context = cb_access_context->GetCurrentAccessContext();
4123 assert(context);
4124
4125 const auto *image_state = Get<IMAGE_STATE>(image);
4126
4127 for (uint32_t index = 0; index < rangeCount; index++) {
4128 const auto &range = pRanges[index];
4129 if (image_state) {
4130 context->UpdateAccessState(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent,
4131 tag);
4132 }
4133 }
4134}
4135
4136bool SyncValidator::PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
4137 VkImageLayout imageLayout,
4138 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
4139 const VkImageSubresourceRange *pRanges) const {
4140 bool skip = false;
4141 const auto *cb_access_context = GetAccessContext(commandBuffer);
4142 assert(cb_access_context);
4143 if (!cb_access_context) return skip;
4144
4145 const auto *context = cb_access_context->GetCurrentAccessContext();
4146 assert(context);
4147 if (!context) return skip;
4148
4149 const auto *image_state = Get<IMAGE_STATE>(image);
4150
4151 for (uint32_t index = 0; index < rangeCount; index++) {
4152 const auto &range = pRanges[index];
4153 if (image_state) {
4154 auto hazard =
4155 context->DetectHazard(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent);
4156 if (hazard.hazard) {
4157 skip |= LogError(image, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06004158 "vkCmdClearDepthStencilImage: Hazard %s for %s, range index %" PRIu32 ". Access info %s.",
John Zulauf1dae9192020-06-16 15:46:44 -06004159 string_SyncHazard(hazard.hazard), report_data->FormatHandle(image).c_str(), index,
John Zulauf37ceaed2020-07-03 16:18:15 -06004160 string_UsageTag(hazard).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06004161 }
4162 }
4163 }
4164 return skip;
4165}
4166
4167void SyncValidator::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
4168 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
4169 const VkImageSubresourceRange *pRanges) {
locke-lunarg8ec19162020-06-16 18:48:34 -06004170 StateTracker::PreCallRecordCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
locke-lunarge1a67022020-04-29 00:15:36 -06004171 auto *cb_access_context = GetAccessContext(commandBuffer);
4172 assert(cb_access_context);
4173 const auto tag = cb_access_context->NextCommandTag(CMD_CLEARDEPTHSTENCILIMAGE);
4174 auto *context = cb_access_context->GetCurrentAccessContext();
4175 assert(context);
4176
4177 const auto *image_state = Get<IMAGE_STATE>(image);
4178
4179 for (uint32_t index = 0; index < rangeCount; index++) {
4180 const auto &range = pRanges[index];
4181 if (image_state) {
4182 context->UpdateAccessState(*image_state, SYNC_TRANSFER_TRANSFER_WRITE, range, {0, 0, 0}, image_state->createInfo.extent,
4183 tag);
4184 }
4185 }
4186}
4187
4188bool SyncValidator::PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4189 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
4190 VkDeviceSize dstOffset, VkDeviceSize stride,
4191 VkQueryResultFlags flags) const {
4192 bool skip = false;
4193 const auto *cb_access_context = GetAccessContext(commandBuffer);
4194 assert(cb_access_context);
4195 if (!cb_access_context) return skip;
4196
4197 const auto *context = cb_access_context->GetCurrentAccessContext();
4198 assert(context);
4199 if (!context) return skip;
4200
4201 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
4202
4203 if (dst_buffer) {
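        // The destination is modeled as one contiguous range of queryCount stride-sized slots, which may
        // slightly over-approximate the bytes the last query actually writes.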
John Zulauf3e86bf02020-09-12 10:47:57 -06004204 const ResourceAccessRange range = MakeRange(dstOffset, stride * queryCount);
locke-lunarge1a67022020-04-29 00:15:36 -06004205 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
4206 if (hazard.hazard) {
John Zulauf59e25072020-07-17 10:55:21 -06004207 skip |=
4208 LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
4209 "vkCmdCopyQueryPoolResults: Hazard %s for dstBuffer %s. Access info %s.", string_SyncHazard(hazard.hazard),
4210 report_data->FormatHandle(dstBuffer).c_str(), string_UsageTag(hazard).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06004211 }
4212 }
locke-lunargff255f92020-05-13 18:53:52 -06004213
4214 // TODO: Track VkQueryPool
locke-lunarge1a67022020-04-29 00:15:36 -06004215 return skip;
4216}
4217
4218void SyncValidator::PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
4219 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
4220 VkDeviceSize stride, VkQueryResultFlags flags) {
locke-lunarg8ec19162020-06-16 18:48:34 -06004221 StateTracker::PreCallRecordCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset,
4222 stride, flags);
locke-lunarge1a67022020-04-29 00:15:36 -06004223 auto *cb_access_context = GetAccessContext(commandBuffer);
4224 assert(cb_access_context);
locke-lunargff255f92020-05-13 18:53:52 -06004225 const auto tag = cb_access_context->NextCommandTag(CMD_COPYQUERYPOOLRESULTS);
locke-lunarge1a67022020-04-29 00:15:36 -06004226 auto *context = cb_access_context->GetCurrentAccessContext();
4227 assert(context);
4228
4229 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
4230
4231 if (dst_buffer) {
John Zulauf3e86bf02020-09-12 10:47:57 -06004232 const ResourceAccessRange range = MakeRange(dstOffset, stride * queryCount);
locke-lunarge1a67022020-04-29 00:15:36 -06004233 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
4234 }
locke-lunargff255f92020-05-13 18:53:52 -06004235
4236 // TODO: Track VkQueryPool
locke-lunarge1a67022020-04-29 00:15:36 -06004237}
4238
4239bool SyncValidator::PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
4240 VkDeviceSize size, uint32_t data) const {
4241 bool skip = false;
4242 const auto *cb_access_context = GetAccessContext(commandBuffer);
4243 assert(cb_access_context);
4244 if (!cb_access_context) return skip;
4245
4246 const auto *context = cb_access_context->GetCurrentAccessContext();
4247 assert(context);
4248 if (!context) return skip;
4249
4250 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
4251
4252 if (dst_buffer) {
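        // The BUFFER_STATE overload of MakeRange is used here, presumably so a VK_WHOLE_SIZE fill
        // resolves against the buffer's actual size (contrast with vkCmdUpdateBuffer below, where
        // VK_WHOLE_SIZE is not allowed).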
John Zulauf3e86bf02020-09-12 10:47:57 -06004253 const ResourceAccessRange range = MakeRange(*dst_buffer, dstOffset, size);
locke-lunarge1a67022020-04-29 00:15:36 -06004254 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
4255 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06004256 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06004257 "vkCmdFillBuffer: Hazard %s for dstBuffer %s. Access info %s.", string_SyncHazard(hazard.hazard),
John Zulauf37ceaed2020-07-03 16:18:15 -06004258 report_data->FormatHandle(dstBuffer).c_str(), string_UsageTag(hazard).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06004259 }
4260 }
4261 return skip;
4262}
4263
4264void SyncValidator::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
4265 VkDeviceSize size, uint32_t data) {
locke-lunarg8ec19162020-06-16 18:48:34 -06004266 StateTracker::PreCallRecordCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
locke-lunarge1a67022020-04-29 00:15:36 -06004267 auto *cb_access_context = GetAccessContext(commandBuffer);
4268 assert(cb_access_context);
4269 const auto tag = cb_access_context->NextCommandTag(CMD_FILLBUFFER);
4270 auto *context = cb_access_context->GetCurrentAccessContext();
4271 assert(context);
4272
4273 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
4274
4275 if (dst_buffer) {
John Zulauf3e86bf02020-09-12 10:47:57 -06004276 const ResourceAccessRange range = MakeRange(*dst_buffer, dstOffset, size);
locke-lunarge1a67022020-04-29 00:15:36 -06004277 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
4278 }
4279}
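// Illustrative application-side sketch (hypothetical handles, not code from this layer): two overlapping
// fills with no intervening barrier are the sort of sequence the vkCmdFillBuffer checks above report as
// WRITE_AFTER_WRITE.
//
//     vkCmdFillBuffer(cmd, buf, 0, 256, 0u);
//     // missing vkCmdPipelineBarrier for TRANSFER write -> TRANSFER write
//     vkCmdFillBuffer(cmd, buf, 0, 256, 1u);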
4280
4281bool SyncValidator::PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
4282 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
4283 const VkImageResolve *pRegions) const {
4284 bool skip = false;
4285 const auto *cb_access_context = GetAccessContext(commandBuffer);
4286 assert(cb_access_context);
4287 if (!cb_access_context) return skip;
4288
4289 const auto *context = cb_access_context->GetCurrentAccessContext();
4290 assert(context);
4291 if (!context) return skip;
4292
4293 const auto *src_image = Get<IMAGE_STATE>(srcImage);
4294 const auto *dst_image = Get<IMAGE_STATE>(dstImage);
4295
4296 for (uint32_t region = 0; region < regionCount; region++) {
4297 const auto &resolve_region = pRegions[region];
4298 if (src_image) {
4299 auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, resolve_region.srcSubresource,
4300 resolve_region.srcOffset, resolve_region.extent);
4301 if (hazard.hazard) {
4302 skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06004303 "vkCmdResolveImage: Hazard %s for srcImage %s, region %" PRIu32 ". Access info %s.",
John Zulauf1dae9192020-06-16 15:46:44 -06004304 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06004305 string_UsageTag(hazard).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06004306 }
4307 }
4308
4309 if (dst_image) {
4310 auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, resolve_region.dstSubresource,
4311 resolve_region.dstOffset, resolve_region.extent);
4312 if (hazard.hazard) {
4313 skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06004314 "vkCmdResolveImage: Hazard %s for dstImage %s, region %" PRIu32 ". Access info %s.",
John Zulauf1dae9192020-06-16 15:46:44 -06004315 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region,
John Zulauf37ceaed2020-07-03 16:18:15 -06004316 string_UsageTag(hazard).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06004317 }
4318 if (skip) break;
4319 }
4320 }
4321
4322 return skip;
4323}
4324
4325void SyncValidator::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
4326 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
4327 const VkImageResolve *pRegions) {
locke-lunarg8ec19162020-06-16 18:48:34 -06004328 StateTracker::PreCallRecordCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount,
4329 pRegions);
locke-lunarge1a67022020-04-29 00:15:36 -06004330 auto *cb_access_context = GetAccessContext(commandBuffer);
4331 assert(cb_access_context);
4332 const auto tag = cb_access_context->NextCommandTag(CMD_RESOLVEIMAGE);
4333 auto *context = cb_access_context->GetCurrentAccessContext();
4334 assert(context);
4335
4336 auto *src_image = Get<IMAGE_STATE>(srcImage);
4337 auto *dst_image = Get<IMAGE_STATE>(dstImage);
4338
4339 for (uint32_t region = 0; region < regionCount; region++) {
4340 const auto &resolve_region = pRegions[region];
4341 if (src_image) {
4342 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, resolve_region.srcSubresource,
4343 resolve_region.srcOffset, resolve_region.extent, tag);
4344 }
4345 if (dst_image) {
4346 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, resolve_region.dstSubresource,
4347 resolve_region.dstOffset, resolve_region.extent, tag);
4348 }
4349 }
4350}
4351
Jeff Leger178b1e52020-10-05 12:22:23 -04004352bool SyncValidator::PreCallValidateCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
4353 const VkResolveImageInfo2KHR *pResolveImageInfo) const {
4354 bool skip = false;
4355 const auto *cb_access_context = GetAccessContext(commandBuffer);
4356 assert(cb_access_context);
4357 if (!cb_access_context) return skip;
4358
4359 const auto *context = cb_access_context->GetCurrentAccessContext();
4360 assert(context);
4361 if (!context) return skip;
4362
4363 const auto *src_image = Get<IMAGE_STATE>(pResolveImageInfo->srcImage);
4364 const auto *dst_image = Get<IMAGE_STATE>(pResolveImageInfo->dstImage);
4365
4366 for (uint32_t region = 0; region < pResolveImageInfo->regionCount; region++) {
4367 const auto &resolve_region = pResolveImageInfo->pRegions[region];
4368 if (src_image) {
4369 auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, resolve_region.srcSubresource,
4370 resolve_region.srcOffset, resolve_region.extent);
4371 if (hazard.hazard) {
4372 skip |= LogError(pResolveImageInfo->srcImage, string_SyncHazardVUID(hazard.hazard),
4373 "vkCmdResolveImage2KHR: Hazard %s for srcImage %s, region %" PRIu32 ". Access info %s.",
4374 string_SyncHazard(hazard.hazard), report_data->FormatHandle(pResolveImageInfo->srcImage).c_str(),
4375 region, string_UsageTag(hazard).c_str());
4376 }
4377 }
4378
4379 if (dst_image) {
4380 auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, resolve_region.dstSubresource,
4381 resolve_region.dstOffset, resolve_region.extent);
4382 if (hazard.hazard) {
4383 skip |= LogError(pResolveImageInfo->dstImage, string_SyncHazardVUID(hazard.hazard),
4384 "vkCmdResolveImage2KHR: Hazard %s for dstImage %s, region %" PRIu32 ". Access info %s.",
4385 string_SyncHazard(hazard.hazard), report_data->FormatHandle(pResolveImageInfo->dstImage).c_str(),
4386 region, string_UsageTag(hazard).c_str());
4387 }
4388 if (skip) break;
4389 }
4390 }
4391
4392 return skip;
4393}
4394
4395void SyncValidator::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
4396 const VkResolveImageInfo2KHR *pResolveImageInfo) {
4397 StateTracker::PreCallRecordCmdResolveImage2KHR(commandBuffer, pResolveImageInfo);
4398 auto *cb_access_context = GetAccessContext(commandBuffer);
4399 assert(cb_access_context);
4400 const auto tag = cb_access_context->NextCommandTag(CMD_RESOLVEIMAGE2KHR);
4401 auto *context = cb_access_context->GetCurrentAccessContext();
4402 assert(context);
4403
4404 auto *src_image = Get<IMAGE_STATE>(pResolveImageInfo->srcImage);
4405 auto *dst_image = Get<IMAGE_STATE>(pResolveImageInfo->dstImage);
4406
4407 for (uint32_t region = 0; region < pResolveImageInfo->regionCount; region++) {
4408 const auto &resolve_region = pResolveImageInfo->pRegions[region];
4409 if (src_image) {
4410 context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, resolve_region.srcSubresource,
4411 resolve_region.srcOffset, resolve_region.extent, tag);
4412 }
4413 if (dst_image) {
4414 context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, resolve_region.dstSubresource,
4415 resolve_region.dstOffset, resolve_region.extent, tag);
4416 }
4417 }
4418}
4419
locke-lunarge1a67022020-04-29 00:15:36 -06004420bool SyncValidator::PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
4421 VkDeviceSize dataSize, const void *pData) const {
4422 bool skip = false;
4423 const auto *cb_access_context = GetAccessContext(commandBuffer);
4424 assert(cb_access_context);
4425 if (!cb_access_context) return skip;
4426
4427 const auto *context = cb_access_context->GetCurrentAccessContext();
4428 assert(context);
4429 if (!context) return skip;
4430
4431 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
4432
4433 if (dst_buffer) {
John Zulauf3e86bf02020-09-12 10:47:57 -06004434 // VK_WHOLE_SIZE not allowed
4435 const ResourceAccessRange range = MakeRange(dstOffset, dataSize);
locke-lunarge1a67022020-04-29 00:15:36 -06004436 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
4437 if (hazard.hazard) {
John Zulauf1dae9192020-06-16 15:46:44 -06004438 skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
John Zulauf59e25072020-07-17 10:55:21 -06004439 "vkCmdUpdateBuffer: Hazard %s for dstBuffer %s. Access info %s.", string_SyncHazard(hazard.hazard),
John Zulauf37ceaed2020-07-03 16:18:15 -06004440 report_data->FormatHandle(dstBuffer).c_str(), string_UsageTag(hazard).c_str());
locke-lunarge1a67022020-04-29 00:15:36 -06004441 }
4442 }
4443 return skip;
4444}
4445
4446void SyncValidator::PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
4447 VkDeviceSize dataSize, const void *pData) {
locke-lunarg8ec19162020-06-16 18:48:34 -06004448 StateTracker::PreCallRecordCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
locke-lunarge1a67022020-04-29 00:15:36 -06004449 auto *cb_access_context = GetAccessContext(commandBuffer);
4450 assert(cb_access_context);
4451 const auto tag = cb_access_context->NextCommandTag(CMD_UPDATEBUFFER);
4452 auto *context = cb_access_context->GetCurrentAccessContext();
4453 assert(context);
4454
4455 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
4456
4457 if (dst_buffer) {
John Zulauf3e86bf02020-09-12 10:47:57 -06004458 // VK_WHOLE_SIZE not allowed
4459 const ResourceAccessRange range = MakeRange(dstOffset, dataSize);
locke-lunarge1a67022020-04-29 00:15:36 -06004460 context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
4461 }
4462}
locke-lunargff255f92020-05-13 18:53:52 -06004463
4464bool SyncValidator::PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
4465 VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const {
4466 bool skip = false;
4467 const auto *cb_access_context = GetAccessContext(commandBuffer);
4468 assert(cb_access_context);
4469 if (!cb_access_context) return skip;
4470
4471 const auto *context = cb_access_context->GetCurrentAccessContext();
4472 assert(context);
4473 if (!context) return skip;
4474
4475 const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
4476
4477 if (dst_buffer) {
John Zulauf3e86bf02020-09-12 10:47:57 -06004478 const ResourceAccessRange range = MakeRange(dstOffset, 4);
locke-lunargff255f92020-05-13 18:53:52 -06004479 auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range);
4480 if (hazard.hazard) {
John Zulauf59e25072020-07-17 10:55:21 -06004481 skip |=
4482 LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
4483 "vkCmdWriteBufferMarkerAMD: Hazard %s for dstBuffer %s. Access info %s.", string_SyncHazard(hazard.hazard),
4484 report_data->FormatHandle(dstBuffer).c_str(), string_UsageTag(hazard).c_str());
locke-lunargff255f92020-05-13 18:53:52 -06004485 }
4486 }
4487 return skip;
4488}
4489
void SyncValidator::PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                         VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) {
    StateTracker::PreCallRecordCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
    auto *cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    const auto tag = cb_access_context->NextCommandTag(CMD_WRITEBUFFERMARKERAMD);
    auto *context = cb_access_context->GetCurrentAccessContext();
    assert(context);

    const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);

    if (dst_buffer) {
        const ResourceAccessRange range = MakeRange(dstOffset, 4);
        context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, range, tag);
    }
}

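// Event commands (vkCmdSetEvent, vkCmdResetEvent, vkCmdWaitEvents): validation and recording are
// delegated to the per-command-buffer access context, which owns the event bookkeeping.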
bool SyncValidator::PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const {
    bool skip = false;
    const auto *cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    if (!cb_context) return skip;

    return cb_context->ValidateSetEvent(commandBuffer, event, stageMask);
}

void SyncValidator::PostCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
    StateTracker::PostCallRecordCmdSetEvent(commandBuffer, event, stageMask);
    auto *cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    if (!cb_context) return;

    cb_context->RecordSetEvent(commandBuffer, event, stageMask);
}

bool SyncValidator::PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                 VkPipelineStageFlags stageMask) const {
    bool skip = false;
    const auto *cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    if (!cb_context) return skip;

    return cb_context->ValidateResetEvent(commandBuffer, event, stageMask);
}

void SyncValidator::PostCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
    StateTracker::PostCallRecordCmdResetEvent(commandBuffer, event, stageMask);
    auto *cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    if (!cb_context) return;

    cb_context->RecordResetEvent(commandBuffer, event, stageMask);
}

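// vkCmdWaitEvents forwards the complete set of memory, buffer, and image barriers to the access
// context for validation and recording.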
bool SyncValidator::PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                                                 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                 uint32_t bufferMemoryBarrierCount,
                                                 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                 uint32_t imageMemoryBarrierCount,
                                                 const VkImageMemoryBarrier *pImageMemoryBarriers) const {
    bool skip = false;
    const auto *cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    if (!cb_context) return skip;

    return cb_context->ValidateWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount,
                                          pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
                                          pImageMemoryBarriers);
}

void SyncValidator::PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                                                uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                uint32_t bufferMemoryBarrierCount,
                                                const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                uint32_t imageMemoryBarrierCount,
                                                const VkImageMemoryBarrier *pImageMemoryBarriers) {
    StateTracker::PostCallRecordCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount,
                                              pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers,
                                              imageMemoryBarrierCount, pImageMemoryBarriers);

    auto *cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    if (!cb_context) return;

    cb_context->RecordWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount,
                                 pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
                                 pImageMemoryBarriers);
}