/* Copyright (c) 2019 The Khronos Group Inc.
 * Copyright (c) 2019 Valve Corporation
 * Copyright (c) 2019 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 */

#include <limits>
#include <vector>
#include <memory>
#include <bitset>
#include "synchronization_validation.h"

static const char *string_SyncHazardVUID(SyncHazard hazard) {
    switch (hazard) {
        case SyncHazard::NONE:
            return "SYNC-HAZARD-NONE";
            break;
        case SyncHazard::READ_AFTER_WRITE:
            return "SYNC-HAZARD-READ_AFTER_WRITE";
            break;
        case SyncHazard::WRITE_AFTER_READ:
            return "SYNC-HAZARD-WRITE_AFTER_READ";
            break;
        case SyncHazard::WRITE_AFTER_WRITE:
            return "SYNC-HAZARD-WRITE_AFTER_WRITE";
            break;
        case SyncHazard::READ_RACING_WRITE:
            return "SYNC-HAZARD-READ-RACING-WRITE";
            break;
        case SyncHazard::WRITE_RACING_WRITE:
            return "SYNC-HAZARD-WRITE-RACING-WRITE";
            break;
        case SyncHazard::WRITE_RACING_READ:
            return "SYNC-HAZARD-WRITE-RACING-READ";
            break;
        default:
            assert(0);
    }
    return "SYNC-HAZARD-INVALID";
}

static const char *string_SyncHazard(SyncHazard hazard) {
    switch (hazard) {
        case SyncHazard::NONE:
            return "NONE";
            break;
        case SyncHazard::READ_AFTER_WRITE:
            return "READ_AFTER_WRITE";
            break;
        case SyncHazard::WRITE_AFTER_READ:
            return "WRITE_AFTER_READ";
            break;
        case SyncHazard::WRITE_AFTER_WRITE:
            return "WRITE_AFTER_WRITE";
            break;
        case SyncHazard::READ_RACING_WRITE:
            return "READ_RACING_WRITE";
            break;
        case SyncHazard::WRITE_RACING_WRITE:
            return "WRITE_RACING_WRITE";
            break;
        case SyncHazard::WRITE_RACING_READ:
            return "WRITE_RACING_READ";
            break;
        default:
            assert(0);
    }
    return "INVALID HAZARD";
}

static constexpr VkPipelineStageFlags kColorAttachmentExecScope = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
static constexpr SyncStageAccessFlags kColorAttachmentAccessScope =
    SyncStageAccessFlagBits::SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ_BIT |
    SyncStageAccessFlagBits::SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT |
    SyncStageAccessFlagBits::SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE_BIT;
static constexpr VkPipelineStageFlags kDepthStencilAttachmentExecScope =
    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
static constexpr SyncStageAccessFlags kDepthStencilAttachmentAccessScope =
    SyncStageAccessFlagBits::SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
    SyncStageAccessFlagBits::SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
    SyncStageAccessFlagBits::SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
    SyncStageAccessFlagBits::SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;

static constexpr SyncOrderingBarrier kColorAttachmentRasterOrder = {kColorAttachmentExecScope, kColorAttachmentAccessScope};
static constexpr SyncOrderingBarrier kDepthStencilAttachmentRasterOrder = {kDepthStencilAttachmentExecScope,
                                                                           kDepthStencilAttachmentAccessScope};
static constexpr SyncOrderingBarrier kAttachmentRasterOrder = {kDepthStencilAttachmentExecScope | kColorAttachmentExecScope,
                                                               kDepthStencilAttachmentAccessScope | kColorAttachmentAccessScope};

inline VkDeviceSize GetRealWholeSize(VkDeviceSize offset, VkDeviceSize size, VkDeviceSize whole_size) {
    if (size == VK_WHOLE_SIZE) {
        return (whole_size - offset);
    }
    return size;
}
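
// Example: for a 256-byte binding, a VK_WHOLE_SIZE request resolves to the bytes remaining past the offset,
// while an explicit size passes through unchanged:
//   GetRealWholeSize(64, VK_WHOLE_SIZE, 256) == 192
//   GetRealWholeSize(64, 128, 256) == 128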

template <typename T>
static ResourceAccessRange MakeRange(const T &has_offset_and_size) {
    return ResourceAccessRange(has_offset_and_size.offset, (has_offset_and_size.offset + has_offset_and_size.size));
}

static ResourceAccessRange MakeRange(VkDeviceSize start, VkDeviceSize size) { return ResourceAccessRange(start, (start + size)); }
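
// Example: ranges are half-open [begin, end), so MakeRange(0, 16) and MakeRange(16, 16) cover the adjacent,
// non-overlapping spans [0, 16) and [16, 32).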

// Expand the pipeline stages without regard to whether they are valid w.r.t. queue or extension
VkPipelineStageFlags ExpandPipelineStages(VkQueueFlags queue_flags, VkPipelineStageFlags stage_mask) {
    VkPipelineStageFlags expanded = stage_mask;
    if (VK_PIPELINE_STAGE_ALL_COMMANDS_BIT & stage_mask) {
        expanded = expanded & ~VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
        for (const auto &all_commands : syncAllCommandStagesByQueueFlags) {
            if (all_commands.first & queue_flags) {
                expanded |= all_commands.second;
            }
        }
    }
    if (VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT & stage_mask) {
        expanded = expanded & ~VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
        expanded |= syncAllCommandStagesByQueueFlags.at(VK_QUEUE_GRAPHICS_BIT) & ~VK_PIPELINE_STAGE_HOST_BIT;
    }
    return expanded;
}
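
// Illustrative example: on a graphics queue,
//   ExpandPipelineStages(VK_QUEUE_GRAPHICS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT)
// drops ALL_COMMANDS and ors in every stage listed for VK_QUEUE_GRAPHICS_BIT in syncAllCommandStagesByQueueFlags,
// leaving the explicit TRANSFER bit set as-is.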

VkPipelineStageFlags RelatedPipelineStages(VkPipelineStageFlags stage_mask,
                                           std::map<VkPipelineStageFlagBits, VkPipelineStageFlags> &map) {
    VkPipelineStageFlags unscanned = stage_mask;
    VkPipelineStageFlags related = 0;
    for (const auto entry : map) {
        const auto stage = entry.first;
        if (stage & unscanned) {
            related = related | entry.second;
            unscanned = unscanned & ~stage;
            if (!unscanned) break;
        }
    }
    return related;
}

VkPipelineStageFlags WithEarlierPipelineStages(VkPipelineStageFlags stage_mask) {
    return stage_mask | RelatedPipelineStages(stage_mask, syncLogicallyEarlierStages);
}

VkPipelineStageFlags WithLaterPipelineStages(VkPipelineStageFlags stage_mask) {
    return stage_mask | RelatedPipelineStages(stage_mask, syncLogicallyLaterStages);
}
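
// Example: WithEarlierPipelineStages(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT) adds the logically earlier stages
// (e.g. vertex shading, draw indirect, top-of-pipe), reflecting the specification rule that synchronizing against
// a stage implicitly covers the stages logically before it in the pipeline.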

static const ResourceAccessRange full_range(std::numeric_limits<VkDeviceSize>::min(), std::numeric_limits<VkDeviceSize>::max());

// Class AccessContext stores the state of accesses specific to a Command, Subpass, or Queue
const std::array<AccessContext::AddressType, AccessContext::kAddressTypeCount> AccessContext::kAddressTypes = {
    AccessContext::AddressType::kLinearAddress, AccessContext::AddressType::kIdealizedAddress};

AccessContext::AccessContext(uint32_t subpass, VkQueueFlags queue_flags,
                             const std::vector<SubpassDependencyGraphNode> &dependencies,
                             const std::vector<AccessContext> &contexts, AccessContext *external_context) {
    Reset();
    const auto &subpass_dep = dependencies[subpass];
    prev_.reserve(subpass_dep.prev.size());
    prev_by_subpass_.resize(subpass, nullptr);  // Can't be more prevs than the subpass we're on
    for (const auto &prev_dep : subpass_dep.prev) {
        assert(prev_dep.dependency);
        const auto dep = *prev_dep.dependency;
        prev_.emplace_back(const_cast<AccessContext *>(&contexts[dep.srcSubpass]), queue_flags, dep);
        prev_by_subpass_[dep.srcSubpass] = &prev_.back();
    }

    async_.reserve(subpass_dep.async.size());
    for (const auto async_subpass : subpass_dep.async) {
        async_.emplace_back(const_cast<AccessContext *>(&contexts[async_subpass]));
    }
    if (subpass_dep.barrier_from_external) {
        src_external_ = TrackBack(external_context, queue_flags, *subpass_dep.barrier_from_external);
    } else {
        src_external_ = TrackBack();
    }
    if (subpass_dep.barrier_to_external) {
        dst_external_ = TrackBack(this, queue_flags, *subpass_dep.barrier_to_external);
    } else {
        dst_external_ = TrackBack();
    }
}

template <typename Detector>
HazardResult AccessContext::DetectPreviousHazard(AddressType type, const Detector &detector,
                                                 const ResourceAccessRange &range) const {
    ResourceAccessRangeMap descent_map;
    ResolvePreviousAccess(type, range, &descent_map, nullptr);

    HazardResult hazard;
    for (auto prev = descent_map.begin(); prev != descent_map.end() && !hazard.hazard; ++prev) {
        hazard = detector.Detect(prev);
    }
    return hazard;
}

// A recursive range walker for hazard detection, first checking the current context and then (via DetectPreviousHazard)
// walking the DAG of prior contexts (for example subpasses)
template <typename Detector>
HazardResult AccessContext::DetectHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range,
                                         DetectOptions options) const {
    HazardResult hazard;

    if (static_cast<uint32_t>(options) & DetectOptions::kDetectAsync) {
        // Async checks don't require recursive lookups, as the async lists are exhaustive for the top-level context
        // so we'll check these first
        for (const auto &async_context : async_) {
            hazard = async_context->DetectAsyncHazard(type, detector, range);
            if (hazard.hazard) return hazard;
        }
    }

    const bool detect_prev = (static_cast<uint32_t>(options) & DetectOptions::kDetectPrevious) != 0;

    const auto &accesses = GetAccessStateMap(type);
    const auto from = accesses.lower_bound(range);
    const auto to = accesses.upper_bound(range);
    ResourceAccessRange gap = {range.begin, range.begin};

    for (auto pos = from; pos != to; ++pos) {
        if (detect_prev) {
            // TODO: After profiling we may want to change the descent logic such that we don't recur per gap...
            // Cover any leading gap, or gap between entries
            gap.end = pos->first.begin;  // We know this begin is < range.end
            if (gap.non_empty()) {
                // Recur on all gaps
                hazard = DetectPreviousHazard(type, detector, gap);
                if (hazard.hazard) return hazard;
            }
            // Set up for the next gap. If pos..end is >= range.end, loop will exit, and trailing gap will be empty
            gap.begin = pos->first.end;
        }

        hazard = detector.Detect(pos);
        if (hazard.hazard) return hazard;
    }

    if (detect_prev) {
        // Detect in the trailing gap as needed
        gap.end = range.end;
        if (gap.non_empty()) {
            hazard = DetectPreviousHazard(type, detector, gap);
        }
    }

    return hazard;
}
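
// Worked example (illustrative): with recorded accesses at [0,16) and [32,48) and a query range of [8,40), the loop
// runs the detector on both overlapping entries, recurs into the uncovered gap [16,32) when detect_prev is set, and
// the trailing-gap check after the loop covers any uncovered tail of the range (empty in this case).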

// A non-recursive range walker for the asynchronous contexts (those we have no barriers with)
template <typename Detector>
HazardResult AccessContext::DetectAsyncHazard(AddressType type, const Detector &detector, const ResourceAccessRange &range) const {
    auto &accesses = GetAccessStateMap(type);
    const auto from = accesses.lower_bound(range);
    const auto to = accesses.upper_bound(range);

    HazardResult hazard;
    for (auto pos = from; pos != to && !hazard.hazard; ++pos) {
        hazard = detector.DetectAsync(pos);
    }

    return hazard;
}

// Resolve the iterator range [first, last) into the destination entry, applying the barrier (if any) to each access
static void ResolveMapToEntry(ResourceAccessRangeMap *dest, ResourceAccessRangeMap::iterator entry,
                              ResourceAccessRangeMap::const_iterator first, ResourceAccessRangeMap::const_iterator last,
                              const SyncBarrier *barrier) {
    auto at = entry;
    for (auto pos = first; pos != last; ++pos) {
        // Every member of the input iterator range must fit within the remaining portion of entry
        assert(at->first.includes(pos->first));
        assert(at != dest->end());
        // Trim up at to the same size as the entry to resolve
        at = sparse_container::split(at, *dest, pos->first);
        auto access = pos->second;
        if (barrier) {
            access.ApplyBarrier(*barrier);
        }
        at->second.Resolve(access);
        ++at;  // Go to the remaining unused section of entry
    }
}

void AccessContext::ResolveAccessRange(AddressType type, const ResourceAccessRange &range, const SyncBarrier *barrier,
                                       ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
                                       bool recur_to_infill) const {
    ResourceRangeMergeIterator current(*resolve_map, GetAccessStateMap(type), range.begin);
    while (current->range.non_empty() && range.includes(current->range.begin)) {
        if (current->pos_B->valid) {
            const auto &src_pos = current->pos_B->lower_bound;
            auto access = src_pos->second;
            if (barrier) {
                access.ApplyBarrier(*barrier);
            }
            if (current->pos_A->valid) {
                current.trim_A();
                current->pos_A->lower_bound->second.Resolve(access);
            } else {
                auto inserted = resolve_map->insert(current->pos_A->lower_bound, std::make_pair(current->range, access));
                current.invalidate_A(inserted);  // Update the parallel iterator to point at the insert segment
            }
        } else {
            // We have to descend to fill this gap
            if (recur_to_infill) {
                if (current->pos_A->valid) {
                    // Dest is valid, so we need to accumulate along the DAG and then resolve... in an N-to-1 resolve operation
                    ResourceAccessRangeMap gap_map;
                    ResolvePreviousAccess(type, current->range, &gap_map, infill_state);
                    ResolveMapToEntry(resolve_map, current->pos_A->lower_bound, gap_map.begin(), gap_map.end(), barrier);
                } else {
                    // There isn't anything in dest in current->range, so we can accumulate directly into it.
                    ResolvePreviousAccess(type, current->range, resolve_map, infill_state);
                    if (barrier) {
                        // Need to apply the barrier to the accesses we accumulated, noting that we haven't updated current
                        for (auto pos = resolve_map->lower_bound(current->range); pos != current->pos_A->lower_bound; ++pos) {
                            pos->second.ApplyBarrier(*barrier);
                        }
                    }
                }
                // Given that there could be gaps we need to seek carefully to not repeatedly search the same gaps in the next
                // iteration of the outer while.

                // Set the parallel iterator to the end of this range s.t. ++ will move us to the next range whether or
                // not the end of the range is a gap. For the seek to work, first we need to warn the parallel iterator
                // we stepped on the dest map
                const auto seek_to = current->range.end - 1;  // The subtraction is safe as range can't be empty (loop condition)
                current.invalidate_A();                       // Changes current->range
                current.seek(seek_to);
            } else if (!current->pos_A->valid && infill_state) {
                // If we didn't find anything in the current range, and we aren't recurring... we infill if required
                auto inserted = resolve_map->insert(current->pos_A->lower_bound, std::make_pair(current->range, *infill_state));
                current.invalidate_A(inserted);  // Update the parallel iterator to point at the correct segment after insert
            }
        }
        ++current;
    }
}
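
// Sketch of the resolve walk above (illustrative): the merge iterator advances over this context's map (pos_B) and
// the destination (pos_A) in parallel; where this context has state it is barrier-transformed and Resolve()d into
// the destination, and where it has a gap the walk either descends the subpass DAG (recur_to_infill) or drops in
// infill_state, so the destination ends up covering the full requested range.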

void AccessContext::ResolvePreviousAccess(AddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
                                          const ResourceAccessState *infill_state) const {
    if ((prev_.size() == 0) && (src_external_.context == nullptr)) {
        if (range.non_empty() && infill_state) {
            descent_map->insert(std::make_pair(range, *infill_state));
        }
    } else {
        // Look for something to fill the gap further along.
        for (const auto &prev_dep : prev_) {
            prev_dep.context->ResolveAccessRange(type, range, &prev_dep.barrier, descent_map, infill_state);
        }

        if (src_external_.context) {
            src_external_.context->ResolveAccessRange(type, range, &src_external_.barrier, descent_map, infill_state);
        }
    }
}

AccessContext::AddressType AccessContext::ImageAddressType(const IMAGE_STATE &image) {
    return (image.fragment_encoder->IsLinearImage()) ? AddressType::kLinearAddress : AddressType::kIdealizedAddress;
}

VkDeviceSize AccessContext::ResourceBaseAddress(const BINDABLE &bindable) {
    return bindable.binding.offset + bindable.binding.mem_state->fake_base_address;
}

static bool SimpleBinding(const BINDABLE &bindable) { return !bindable.sparse && bindable.binding.mem_state; }

static SyncStageAccessIndex ColorLoadUsage(VkAttachmentLoadOp load_op) {
    const auto stage_access = (load_op == VK_ATTACHMENT_LOAD_OP_LOAD) ? SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ
                                                                      : SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE;
    return stage_access;
}
static SyncStageAccessIndex DepthStencilLoadUsage(VkAttachmentLoadOp load_op) {
    const auto stage_access = (load_op == VK_ATTACHMENT_LOAD_OP_LOAD) ? SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ
                                                                      : SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE;
    return stage_access;
}
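
// Example: VK_ATTACHMENT_LOAD_OP_LOAD is modeled as a read of the prior attachment contents, while CLEAR and
// DONT_CARE are modeled as writes, e.g. ColorLoadUsage(VK_ATTACHMENT_LOAD_OP_CLEAR) yields
// SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE.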

void AccessContext::ResolvePreviousAccess(const IMAGE_STATE &image_state, const VkImageSubresourceRange &subresource_range_arg,
                                          AddressType address_type, ResourceAccessRangeMap *descent_map,
                                          const ResourceAccessState *infill_state) const {
    if (!SimpleBinding(image_state)) return;

    auto subresource_range = NormalizeSubresourceRange(image_state.createInfo, subresource_range_arg);
    subresource_adapter::ImageRangeGenerator range_gen(*image_state.fragment_encoder.get(), subresource_range, {0, 0, 0},
                                                       image_state.createInfo.extent);
    const auto base_address = ResourceBaseAddress(image_state);
    for (; range_gen->non_empty(); ++range_gen) {
        ResolvePreviousAccess(address_type, (*range_gen + base_address), descent_map, infill_state);
    }
}

bool AccessContext::ValidateLayoutTransitions(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                              const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
                                              uint32_t subpass) const {
    bool skip = false;
    const auto &transitions = rp_state.subpass_transitions[subpass];
    for (const auto &transition : transitions) {
        auto hazard = DetectSubpassTransitionHazard(transition, attachment_views);
        if (hazard.hazard) {
            skip |= sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
                                        "%s: Hazard %s in subpass %" PRIu32 " for attachment %" PRIu32 " image layout transition.",
                                        func_name, string_SyncHazard(hazard.hazard), subpass, transition.attachment);
        }
    }
    return skip;
}

bool AccessContext::ValidateLoadOperation(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                          const VkRect2D &render_area,
                                          const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
                                          uint32_t subpass) const {
    bool skip = false;
    const auto *attachment_ci = rp_state.createInfo.pAttachments;
    VkExtent3D extent = CastTo3D(render_area.extent);
    VkOffset3D offset = CastTo3D(render_area.offset);
    const auto external_access_scope = src_external_.barrier.dst_access_scope;
    HazardResult hazard;

    for (uint32_t i = 0; i < rp_state.createInfo.attachmentCount; i++) {
        if (subpass == rp_state.attachment_first_subpass[i]) {
            if (attachment_views[i] == nullptr) continue;
            const IMAGE_VIEW_STATE &view = *attachment_views[i];
            const IMAGE_STATE *image = view.image_state.get();
            if (image == nullptr) continue;
            const auto &ci = attachment_ci[i];
            const bool is_transition = rp_state.attachment_first_is_transition[i];

            // Need to check in the following way:
            // 1) if the usage bit isn't in the dest_access_scope, and there is a layout transition for initial use, report
            //    hazard vs. transition
            // 2) if there isn't a layout transition, we need to look at the external context with a "detect hazard" operation
            //    for each aspect loaded.

            const bool has_depth = FormatHasDepth(ci.format);
            const bool has_stencil = FormatHasStencil(ci.format);
            const bool is_color = !(has_depth || has_stencil);

            const SyncStageAccessIndex load_index = has_depth ? DepthStencilLoadUsage(ci.loadOp) : ColorLoadUsage(ci.loadOp);
            const SyncStageAccessFlags load_mask = (has_depth || is_color) ? SyncStageAccess::Flags(load_index) : 0U;
            const SyncStageAccessIndex stencil_load_index = has_stencil ? DepthStencilLoadUsage(ci.stencilLoadOp) : load_index;
            const SyncStageAccessFlags stencil_mask = has_stencil ? SyncStageAccess::Flags(stencil_load_index) : 0U;

            const char *aspect = nullptr;
            if (is_transition) {
                // For transitions, verify the load usage against the external dependency's destination access scope
                SyncHazard transition_hazard = SyncHazard::NONE;
                bool checked_stencil = false;
                if (load_mask) {
                    if ((load_mask & external_access_scope) != load_mask) {
                        transition_hazard =
                            SyncStageAccess::HasWrite(load_mask) ? SyncHazard::WRITE_AFTER_WRITE : SyncHazard::READ_AFTER_WRITE;
                        aspect = is_color ? "color" : "depth";
                    }
                    if (!transition_hazard && stencil_mask) {
                        if ((stencil_mask & external_access_scope) != stencil_mask) {
                            transition_hazard = SyncStageAccess::HasWrite(stencil_mask) ? SyncHazard::WRITE_AFTER_WRITE
                                                                                        : SyncHazard::READ_AFTER_WRITE;
                            aspect = "stencil";
                            checked_stencil = true;
                        }
                    }
                }
                if (transition_hazard) {
                    // Hazard vs. ILT
                    auto load_op_string = string_VkAttachmentLoadOp(checked_stencil ? ci.stencilLoadOp : ci.loadOp);
                    skip |=
                        sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(transition_hazard),
                                            "%s: Hazard %s vs. layout transition in subpass %" PRIu32 " for attachment %" PRIu32
                                            " aspect %s during load with loadOp %s.",
                                            func_name, string_SyncHazard(transition_hazard), subpass, i, aspect, load_op_string);
                }
            } else {
                auto hazard_range = view.normalized_subresource_range;
                bool checked_stencil = false;
                if (is_color) {
                    hazard = DetectHazard(*image, load_index, view.normalized_subresource_range, offset, extent);
                    aspect = "color";
                } else {
                    if (has_depth) {
                        hazard_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
                        hazard = DetectHazard(*image, load_index, hazard_range, offset, extent);
                        aspect = "depth";
                    }
                    if (!hazard.hazard && has_stencil) {
                        hazard_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
                        hazard = DetectHazard(*image, stencil_load_index, hazard_range, offset, extent);
                        aspect = "stencil";
                        checked_stencil = true;
                    }
                }

                if (hazard.hazard) {
                    auto load_op_string = string_VkAttachmentLoadOp(checked_stencil ? ci.stencilLoadOp : ci.loadOp);
                    skip |= sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
                                                "%s: Hazard %s in subpass %" PRIu32 " for attachment %" PRIu32
                                                " aspect %s during load with loadOp %s.",
                                                func_name, string_SyncHazard(hazard.hazard), subpass, i, aspect, load_op_string);
                }
            }
        }
    }
    return skip;
}

bool AccessContext::ValidateResolveOperations(const SyncValidator &sync_state, const RENDER_PASS_STATE &rp_state,
                                              const VkRect2D &render_area,
                                              const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const char *func_name,
                                              uint32_t subpass) const {
    bool skip = false;
    VkExtent3D extent = CastTo3D(render_area.extent);
    VkOffset3D offset = CastTo3D(render_area.offset);
    const auto &rp_ci = rp_state.createInfo;
    const auto *attachment_ci = rp_ci.pAttachments;
    const auto &subpass_ci = rp_ci.pSubpasses[subpass];

    // Color resolves
    const auto *color_attachments = subpass_ci.pColorAttachments;
    const auto *color_resolve = subpass_ci.pResolveAttachments;
    if (color_resolve && color_attachments) {
        for (uint32_t i = 0; i < subpass_ci.colorAttachmentCount; i++) {
            const auto &color_attach = color_attachments[i].attachment;
            const auto &resolve_attach = subpass_ci.pResolveAttachments[i].attachment;
            if ((color_attach != VK_ATTACHMENT_UNUSED) && (resolve_attach != VK_ATTACHMENT_UNUSED)) {
                const char *resolve_op = "color attachment read";
                auto hazard = DetectHazard(attachment_views[color_attach], SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ,
                                           kColorAttachmentRasterOrder, offset, extent);

                if (!hazard.hazard) {
                    resolve_op = "resolve attachment write";
                    hazard = DetectHazard(attachment_views[resolve_attach], SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE,
                                          kColorAttachmentRasterOrder, offset, extent);
                }
                if (hazard.hazard) {
                    skip |= sync_state.LogError(rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
                                                "%s: Hazard %s in subpass %" PRIu32 " during %s, from color attachment %" PRIu32
                                                " to resolve attachment %" PRIu32 ".",
                                                func_name, string_SyncHazard(hazard.hazard), subpass, resolve_op, color_attach,
                                                resolve_attach);
                }
            }
        }
    }
    const auto ds_resolve = lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolve>(subpass_ci.pNext);
    if (ds_resolve && ds_resolve->pDepthStencilResolveAttachment &&
        (ds_resolve->pDepthStencilResolveAttachment->attachment != VK_ATTACHMENT_UNUSED) && subpass_ci.pDepthStencilAttachment &&
        (subpass_ci.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)) {
        const auto src_at = subpass_ci.pDepthStencilAttachment->attachment;
        const auto src_ci = attachment_ci[src_at];
        // The formats are required to match so we can pick either
        const bool resolve_depth = (ds_resolve->depthResolveMode != VK_RESOLVE_MODE_NONE) && FormatHasDepth(src_ci.format);
        const bool resolve_stencil = (ds_resolve->stencilResolveMode != VK_RESOLVE_MODE_NONE) && FormatHasStencil(src_ci.format);
        const auto dst_at = ds_resolve->pDepthStencilResolveAttachment->attachment;
        VkImageAspectFlags aspect_mask = 0u;

        const char *aspect_string = nullptr;
        if (resolve_depth && resolve_stencil) {
            // Validate all aspects together
            aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
            aspect_string = "depth/stencil";
        } else if (resolve_depth) {
            // Validate depth only
            aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT;
            aspect_string = "depth";
        } else if (resolve_stencil) {
            // Validate stencil only
            aspect_mask = VK_IMAGE_ASPECT_STENCIL_BIT;
            aspect_string = "stencil";
        }

        if (aspect_mask) {
            const char *resolve_op = "attachment read";
            auto hazard = DetectHazard(attachment_views[src_at], SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ,
                                       kDepthStencilAttachmentRasterOrder, offset, extent, aspect_mask);
            if (!hazard.hazard) {
                // Use the broader ordering guarantee as d/s resolve uses the color stage, to process depth/stencil information
                resolve_op = "resolve attachment write";
                hazard = DetectHazard(attachment_views[dst_at], SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE,
                                      kAttachmentRasterOrder, offset, extent, aspect_mask);
            }
            if (hazard.hazard) {
                skip |= sync_state.LogError(
                    rp_state.renderPass, string_SyncHazardVUID(hazard.hazard),
                    "%s: Hazard %s in subpass %" PRIu32 " during %s %s, from depth/stencil attachment %" PRIu32
                    " to resolve attachment %" PRIu32 ".",
                    func_name, string_SyncHazard(hazard.hazard), subpass, aspect_string, resolve_op, src_at, dst_at);
            }
        }
    }
    return skip;
}

class HazardDetector {
    SyncStageAccessIndex usage_index_;

  public:
    HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const { return pos->second.DetectHazard(usage_index_); }
    HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos) const {
        return pos->second.DetectAsyncHazard(usage_index_);
    }
    HazardDetector(SyncStageAccessIndex usage) : usage_index_(usage) {}
};

class HazardDetectorWithOrdering {
    const SyncStageAccessIndex usage_index_;
    const SyncOrderingBarrier &ordering_;

  public:
    HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const {
        return pos->second.DetectHazard(usage_index_, ordering_);
    }
    HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos) const {
        return pos->second.DetectAsyncHazard(usage_index_);
    }
    HazardDetectorWithOrdering(SyncStageAccessIndex usage, const SyncOrderingBarrier &ordering)
        : usage_index_(usage), ordering_(ordering) {}
};

HazardResult AccessContext::DetectHazard(AddressType type, SyncStageAccessIndex usage_index,
                                         const ResourceAccessRange &range) const {
    HazardDetector detector(usage_index);
    return DetectHazard(type, detector, range, DetectOptions::kDetectAll);
}

HazardResult AccessContext::DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index,
                                         const ResourceAccessRange &range) const {
    if (!SimpleBinding(buffer)) return HazardResult();
    return DetectHazard(AddressType::kLinearAddress, usage_index, range + ResourceBaseAddress(buffer));
}

template <typename Detector>
HazardResult AccessContext::DetectHazard(Detector &detector, const IMAGE_STATE &image,
                                         const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
                                         const VkExtent3D &extent, DetectOptions options) const {
    if (!SimpleBinding(image)) return HazardResult();
    subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, offset, extent);
    const auto address_type = ImageAddressType(image);
    const auto base_address = ResourceBaseAddress(image);
    for (; range_gen->non_empty(); ++range_gen) {
        HazardResult hazard = DetectHazard(address_type, detector, (*range_gen + base_address), options);
        if (hazard.hazard) return hazard;
    }
    return HazardResult();
}

HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                                         const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                                         const VkExtent3D &extent) const {
    VkImageSubresourceRange subresource_range = {subresource.aspectMask, subresource.mipLevel, 1, subresource.baseArrayLayer,
                                                 subresource.layerCount};
    return DetectHazard(image, current_usage, subresource_range, offset, extent);
}

HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                                         const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
                                         const VkExtent3D &extent) const {
    HazardDetector detector(current_usage);
    return DetectHazard(detector, image, subresource_range, offset, extent, DetectOptions::kDetectAll);
}

HazardResult AccessContext::DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                                         const VkImageSubresourceRange &subresource_range, const SyncOrderingBarrier &ordering,
                                         const VkOffset3D &offset, const VkExtent3D &extent) const {
    HazardDetectorWithOrdering detector(current_usage, ordering);
    return DetectHazard(detector, image, subresource_range, offset, extent, DetectOptions::kDetectAll);
}

// Some common code for looking at attachments; if anything is wrong with the attachment entry itself we return no
// hazard, as core validation should already have reported the invalid attachment
HazardResult AccessContext::DetectHazard(const IMAGE_VIEW_STATE *view, SyncStageAccessIndex current_usage,
                                         const SyncOrderingBarrier &ordering, const VkOffset3D &offset, const VkExtent3D &extent,
                                         VkImageAspectFlags aspect_mask) const {
    if (view != nullptr) {
        const IMAGE_STATE *image = view->image_state.get();
        if (image != nullptr) {
            auto *detect_range = &view->normalized_subresource_range;
            VkImageSubresourceRange masked_range;
            if (aspect_mask) {  // If present and non-zero, restrict the normalized range to aspects present in aspect_mask
                masked_range = view->normalized_subresource_range;
                masked_range.aspectMask = aspect_mask & masked_range.aspectMask;
                detect_range = &masked_range;
            }

            // NOTE: The range encoding code is not robust to invalid ranges, so we protect it from our change
            if (detect_range->aspectMask) {
                return DetectHazard(*image, current_usage, *detect_range, ordering, offset, extent);
            }
        }
    }
    return HazardResult();
}

class BarrierHazardDetector {
  public:
    BarrierHazardDetector(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
                          SyncStageAccessFlags src_access_scope)
        : usage_index_(usage_index), src_exec_scope_(src_exec_scope), src_access_scope_(src_access_scope) {}

    HazardResult Detect(const ResourceAccessRangeMap::const_iterator &pos) const {
        return pos->second.DetectBarrierHazard(usage_index_, src_exec_scope_, src_access_scope_);
    }
    HazardResult DetectAsync(const ResourceAccessRangeMap::const_iterator &pos) const {
        // Async barrier hazard detection can use the same path as the usage index is not IsRead, but is IsWrite
        return pos->second.DetectAsyncHazard(usage_index_);
    }

  private:
    SyncStageAccessIndex usage_index_;
    VkPipelineStageFlags src_exec_scope_;
    SyncStageAccessFlags src_access_scope_;
};

HazardResult AccessContext::DetectBarrierHazard(AddressType type, SyncStageAccessIndex current_usage,
                                                VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                                                const ResourceAccessRange &range, DetectOptions options) const {
    BarrierHazardDetector detector(current_usage, src_exec_scope, src_access_scope);
    return DetectHazard(type, detector, range, options);
}

HazardResult AccessContext::DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                                     SyncStageAccessFlags src_access_scope,
                                                     const VkImageSubresourceRange &subresource_range,
                                                     DetectOptions options) const {
    BarrierHazardDetector detector(SyncStageAccessIndex::SYNC_IMAGE_LAYOUT_TRANSITION, src_exec_scope, src_access_scope);
    VkOffset3D zero_offset = {0, 0, 0};
    return DetectHazard(detector, image, subresource_range, zero_offset, image.createInfo.extent, options);
}

HazardResult AccessContext::DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                                     SyncStageAccessFlags src_stage_accesses,
                                                     const VkImageMemoryBarrier &barrier) const {
    auto subresource_range = NormalizeSubresourceRange(image.createInfo, barrier.subresourceRange);
    const auto src_access_scope = SyncStageAccess::AccessScope(src_stage_accesses, barrier.srcAccessMask);
    return DetectImageBarrierHazard(image, src_exec_scope, src_access_scope, subresource_range, kDetectAll);
}

template <typename Flags, typename Map>
SyncStageAccessFlags AccessScopeImpl(Flags flag_mask, const Map &map) {
    SyncStageAccessFlags scope = 0;
    for (const auto &bit_scope : map) {
        if (flag_mask < bit_scope.first) break;

        if (flag_mask & bit_scope.first) {
            scope |= bit_scope.second;
        }
    }
    return scope;
}

SyncStageAccessFlags SyncStageAccess::AccessScopeByStage(VkPipelineStageFlags stages) {
    return AccessScopeImpl(stages, syncStageAccessMaskByStageBit);
}

SyncStageAccessFlags SyncStageAccess::AccessScopeByAccess(VkAccessFlags accesses) {
    return AccessScopeImpl(accesses, syncStageAccessMaskByAccessBit);
}

// Getting from stage mask and access mask to stage/access masks is something we need to be good at...
SyncStageAccessFlags SyncStageAccess::AccessScope(VkPipelineStageFlags stages, VkAccessFlags accesses) {
    // The access scope is the intersection of all stage/access types possible for the enabled stages and the enabled
    // accesses. After factoring out common terms, the union of the per-stage/per-access intersections equals the
    // intersection of the union of all stage/access types for the stages with the same union for the access mask.
    return AccessScopeByStage(stages) & AccessScopeByAccess(accesses);
}
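
// Example: AccessScope(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_ACCESS_SHADER_READ_BIT) expands both masks and
// intersects them, keeping only the stage/access pairs valid for that stage (e.g. SYNC_VERTEX_SHADER_SHADER_READ);
// access bits that no listed stage can produce drop out of the scope.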

template <typename Action>
void UpdateMemoryAccessState(ResourceAccessRangeMap *accesses, const ResourceAccessRange &range, const Action &action) {
    // TODO -- region/mem-range accurate update
    auto pos = accesses->lower_bound(range);
    if (pos == accesses->end() || !pos->first.intersects(range)) {
        // The range is empty, fill it with a default value.
        pos = action.Infill(accesses, pos, range);
    } else if (range.begin < pos->first.begin) {
        // Leading empty space, infill
        pos = action.Infill(accesses, pos, ResourceAccessRange(range.begin, pos->first.begin));
    } else if (pos->first.begin < range.begin) {
        // Trim the beginning if needed
        pos = accesses->split(pos, range.begin, sparse_container::split_op_keep_both());
        ++pos;
    }

    const auto the_end = accesses->end();
    while ((pos != the_end) && pos->first.intersects(range)) {
        if (pos->first.end > range.end) {
            pos = accesses->split(pos, range.end, sparse_container::split_op_keep_both());
        }

        pos = action(accesses, pos);
        if (pos == the_end) break;

        auto next = pos;
        ++next;
        if ((pos->first.end < range.end) && (next != the_end) && !next->first.is_subsequent_to(pos->first)) {
            // Need to infill if next is disjoint
            VkDeviceSize limit = (next == the_end) ? range.end : std::min(range.end, next->first.begin);
            ResourceAccessRange new_range(pos->first.end, limit);
            next = action.Infill(accesses, next, new_range);
        }
        pos = next;
    }
}
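
// Worked example (illustrative): with existing entries [0,16) and [32,64) and an update range of [8,40), the walk
// splits the first entry at 8 and applies the action to [8,16), infills the disjoint gap [16,32) via action.Infill()
// and applies the action there, then splits the second entry at 40 and applies the action to [32,40).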

struct UpdateMemoryAccessStateFunctor {
    using Iterator = ResourceAccessRangeMap::iterator;
    Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const {
        // This is only called on gaps, and never returns a gap.
        ResourceAccessState default_state;
        context.ResolvePreviousAccess(type, range, accesses, &default_state);
        return accesses->lower_bound(range);
    }

    Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
        auto &access_state = pos->second;
        access_state.Update(usage, tag);
        return pos;
    }

    UpdateMemoryAccessStateFunctor(AccessContext::AddressType type_, const AccessContext &context_, SyncStageAccessIndex usage_,
                                   const ResourceUsageTag &tag_)
        : type(type_), context(context_), usage(usage_), tag(tag_) {}
    const AccessContext::AddressType type;
    const AccessContext &context;
    const SyncStageAccessIndex usage;
    const ResourceUsageTag &tag;
};

struct ApplyMemoryAccessBarrierFunctor {
    using Iterator = ResourceAccessRangeMap::iterator;
    inline Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const { return pos; }

    Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
        auto &access_state = pos->second;
        access_state.ApplyMemoryAccessBarrier(src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope);
        return pos;
    }

    ApplyMemoryAccessBarrierFunctor(VkPipelineStageFlags src_exec_scope_, SyncStageAccessFlags src_access_scope_,
                                    VkPipelineStageFlags dst_exec_scope_, SyncStageAccessFlags dst_access_scope_)
        : src_exec_scope(src_exec_scope_),
          src_access_scope(src_access_scope_),
          dst_exec_scope(dst_exec_scope_),
          dst_access_scope(dst_access_scope_) {}

    VkPipelineStageFlags src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    VkPipelineStageFlags dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
};

struct ApplyGlobalBarrierFunctor {
    using Iterator = ResourceAccessRangeMap::iterator;
    inline Iterator Infill(ResourceAccessRangeMap *accesses, Iterator pos, ResourceAccessRange range) const { return pos; }

    Iterator operator()(ResourceAccessRangeMap *accesses, Iterator pos) const {
        auto &access_state = pos->second;
        access_state.ApplyExecutionBarrier(src_exec_scope, dst_exec_scope);

        for (const auto &functor : barrier_functor) {
            functor(accesses, pos);
        }
        return pos;
    }

    ApplyGlobalBarrierFunctor(VkPipelineStageFlags src_exec_scope, VkPipelineStageFlags dst_exec_scope,
                              SyncStageAccessFlags src_stage_accesses, SyncStageAccessFlags dst_stage_accesses,
                              uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers)
        : src_exec_scope(src_exec_scope), dst_exec_scope(dst_exec_scope) {
        // Don't want to create this per tracked item, but don't want to loop through all tracked items per barrier...
        barrier_functor.reserve(memoryBarrierCount);
        for (uint32_t barrier_index = 0; barrier_index < memoryBarrierCount; barrier_index++) {
            const auto &barrier = pMemoryBarriers[barrier_index];
            barrier_functor.emplace_back(src_exec_scope, SyncStageAccess::AccessScope(src_stage_accesses, barrier.srcAccessMask),
                                         dst_exec_scope, SyncStageAccess::AccessScope(dst_stage_accesses, barrier.dstAccessMask));
        }
    }

    const VkPipelineStageFlags src_exec_scope;
    const VkPipelineStageFlags dst_exec_scope;
    std::vector<ApplyMemoryAccessBarrierFunctor> barrier_functor;
};
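
// Illustrative use (hypothetical call site, assuming a vkCmdPipelineBarrier handler): the recording path would build
// one of these from the barrier command's masks and apply it across all tracked ranges, e.g.
//   ApplyGlobalBarrierFunctor functor(src_exec_scope, dst_exec_scope, src_stage_accesses, dst_stage_accesses,
//                                     memoryBarrierCount, pMemoryBarriers);
//   context->ApplyGlobalBarriers(functor);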

void AccessContext::UpdateAccessState(AddressType type, SyncStageAccessIndex current_usage, const ResourceAccessRange &range,
                                      const ResourceUsageTag &tag) {
    UpdateMemoryAccessStateFunctor action(type, *this, current_usage, tag);
    UpdateMemoryAccessState(&GetAccessStateMap(type), range, action);
}

void AccessContext::UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage,
                                      const ResourceAccessRange &range, const ResourceUsageTag &tag) {
    if (!SimpleBinding(buffer)) return;
    const auto base_address = ResourceBaseAddress(buffer);
    UpdateAccessState(AddressType::kLinearAddress, current_usage, range + base_address, tag);
}

void AccessContext::UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                                      const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset,
                                      const VkExtent3D &extent, const ResourceUsageTag &tag) {
    if (!SimpleBinding(image)) return;
    subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, offset, extent);
    const auto address_type = ImageAddressType(image);
    const auto base_address = ResourceBaseAddress(image);
    UpdateMemoryAccessStateFunctor action(address_type, *this, current_usage, tag);
    for (; range_gen->non_empty(); ++range_gen) {
        UpdateMemoryAccessState(&GetAccessStateMap(address_type), (*range_gen + base_address), action);
    }
}

void AccessContext::UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                                      const VkImageSubresourceLayers &subresource, const VkOffset3D &offset,
                                      const VkExtent3D &extent, const ResourceUsageTag &tag) {
    // TODO: replace the encoder/generator with offset3D aware versions
    VkImageSubresourceRange subresource_range = {subresource.aspectMask, subresource.mipLevel, 1, subresource.baseArrayLayer,
                                                 subresource.layerCount};
    UpdateAccessState(image, current_usage, subresource_range, offset, extent, tag);
}

template <typename Action>
void AccessContext::UpdateMemoryAccess(const BUFFER_STATE &buffer, const ResourceAccessRange &range, const Action action) {
    if (!SimpleBinding(buffer)) return;
    const auto base_address = ResourceBaseAddress(buffer);
    UpdateMemoryAccessState(&GetAccessStateMap(AddressType::kLinearAddress), (range + base_address), action);
}

template <typename Action>
void AccessContext::UpdateMemoryAccess(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
                                       const Action action) {
    if (!SimpleBinding(image)) return;
    const auto address_type = ImageAddressType(image);
    auto *accesses = &GetAccessStateMap(address_type);

    subresource_adapter::ImageRangeGenerator range_gen(*image.fragment_encoder.get(), subresource_range, {0, 0, 0},
                                                       image.createInfo.extent);

    const auto base_address = ResourceBaseAddress(image);
    for (; range_gen->non_empty(); ++range_gen) {
        UpdateMemoryAccessState(accesses, (*range_gen + base_address), action);
    }
}

template <typename Action>
void AccessContext::ApplyGlobalBarriers(const Action &barrier_action) {
    // Note: Barriers do *not* cross context boundaries, applying only to accesses within... (at least for renderpass subpasses)
    for (const auto address_type : kAddressTypes) {
        UpdateMemoryAccessState(&GetAccessStateMap(address_type), full_range, barrier_action);
    }
}

void AccessContext::ResolveChildContexts(const std::vector<AccessContext> &contexts) {
    for (uint32_t subpass_index = 0; subpass_index < contexts.size(); subpass_index++) {
        auto &context = contexts[subpass_index];
        for (const auto address_type : kAddressTypes) {
            context.ResolveAccessRange(address_type, full_range, &context.GetDstExternalTrackBack().barrier,
                                       &GetAccessStateMap(address_type), nullptr, false);
        }
    }
}

void AccessContext::ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                      SyncStageAccessFlags src_access_scope, VkPipelineStageFlags dst_exec_scope,
                                      SyncStageAccessFlags dst_access_scope, const VkImageSubresourceRange &subresource_range) {
    const ApplyMemoryAccessBarrierFunctor barrier_action(src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope);
    UpdateMemoryAccess(image, subresource_range, barrier_action);
}

// TODO: Plumb offset/extent throughout the image call stacks, with default injector overloads to preserve backwards
// compatibility as needed
void AccessContext::ApplyImageBarrier(const IMAGE_STATE &image, VkPipelineStageFlags src_exec_scope,
                                      SyncStageAccessFlags src_access_scope, VkPipelineStageFlags dst_exec_scope,
                                      SyncStageAccessFlags dst_access_scope, const VkImageSubresourceRange &subresource_range,
                                      bool layout_transition, const ResourceUsageTag &tag) {
    if (layout_transition) {
        UpdateAccessState(image, SYNC_IMAGE_LAYOUT_TRANSITION, subresource_range, VkOffset3D{0, 0, 0}, image.createInfo.extent,
                          tag);
        ApplyImageBarrier(image, src_exec_scope, SYNC_IMAGE_LAYOUT_TRANSITION_BIT, dst_exec_scope, dst_access_scope,
                          subresource_range);
    } else {
        ApplyImageBarrier(image, src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope, subresource_range);
    }
}
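
// Example: a layout transition is modeled as a write at tag (SYNC_IMAGE_LAYOUT_TRANSITION) that is immediately
// barriered with SYNC_IMAGE_LAYOUT_TRANSITION_BIT as the source access scope, so subsequent accesses are ordered
// against the transition itself rather than against the pre-barrier accesses.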

void AccessContext::ApplyImageBarrier(const IMAGE_STATE &image, const SyncBarrier &barrier,
                                      const VkImageSubresourceRange &subresource_range, bool layout_transition,
                                      const ResourceUsageTag &tag) {
    ApplyImageBarrier(image, barrier.src_exec_scope, barrier.src_access_scope, barrier.dst_exec_scope, barrier.dst_access_scope,
                      subresource_range, layout_transition, tag);
}
996
997// Suitable only for *subpass* access contexts
998HazardResult AccessContext::DetectSubpassTransitionHazard(const RENDER_PASS_STATE::AttachmentTransition &transition,
999 const std::vector<const IMAGE_VIEW_STATE *> &attachments) const {
1000 const auto *attach_view = attachments[transition.attachment];
1001 if (!attach_view) return HazardResult();
1002 const auto image_state = attach_view->image_state.get();
1003 if (!image_state) return HazardResult();
1004
1005 const auto *track_back = GetTrackBackFromSubpass(transition.prev_pass);
1006 // We should never ask for a transition from a context we don't have
1007 assert(track_back);
1008 assert(track_back->context);
1009
1010 // Do the detection against the specific prior context independent of other contexts. (Synchronous only)
1011 auto hazard = track_back->context->DetectImageBarrierHazard(*image_state, track_back->barrier.src_exec_scope,
1012 track_back->barrier.src_access_scope,
1013 attach_view->normalized_subresource_range, kDetectPrevious);
1014 if (!hazard.hazard) {
1015 // The Async hazard check is against the current context's async set.
1016 hazard = DetectImageBarrierHazard(*image_state, track_back->barrier.src_exec_scope, track_back->barrier.src_access_scope,
1017 attach_view->normalized_subresource_range, kDetectAsync);
1018 }
1019 return hazard;
1020}
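
// Note on the two-phase check above: kDetectPrevious walks the synchronously ordered prior contexts for the
// transition's source subpass, while kDetectAsync consults only contexts with no DAG ordering relative to this one,
// where any overlapping access is a race that no barrier can fix.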

// Class CommandBufferAccessContext: Keep track of resource access state information for a specific command buffer
bool CommandBufferAccessContext::ValidateBeginRenderPass(const RENDER_PASS_STATE &rp_state,
                                                         const VkRenderPassBeginInfo *pRenderPassBegin,
                                                         const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                         const char *func_name) const {
    // Check if any of the layout transitions are hazardous... but we don't have the renderpass context to work with, so we
    // create a temporary context to validate against.
    bool skip = false;
    uint32_t subpass = 0;
    const auto &transitions = rp_state.subpass_transitions[subpass];
    if (transitions.size()) {
        const std::vector<AccessContext> empty_context_vector;
        // Create a context we can use to validate against...
        AccessContext temp_context(subpass, queue_flags_, rp_state.subpass_dependencies, empty_context_vector,
                                   const_cast<AccessContext *>(&cb_access_context_));

        assert(pRenderPassBegin);
        if (nullptr == pRenderPassBegin) return skip;

        const auto fb_state = sync_state_->Get<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer);
        assert(fb_state);
        if (nullptr == fb_state) return skip;

        // Create a limited array of views (which we'll need to toss after validation)
        std::vector<const IMAGE_VIEW_STATE *> views;
        const auto count_attachment = GetFramebufferAttachments(*pRenderPassBegin, *fb_state);
        const auto attachment_count = count_attachment.first;
        const auto *attachments = count_attachment.second;
        views.resize(attachment_count, nullptr);
        for (const auto &transition : transitions) {
            assert(transition.attachment < attachment_count);
            views[transition.attachment] = sync_state_->Get<IMAGE_VIEW_STATE>(attachments[transition.attachment]);
        }

        skip |= temp_context.ValidateLayoutTransitions(*sync_state_, rp_state, views, func_name, 0);
        skip |= temp_context.ValidateLoadOperation(*sync_state_, rp_state, pRenderPassBegin->renderArea, views, func_name, 0);
    }
    return skip;
}

bool CommandBufferAccessContext::ValidateNextSubpass(const char *func_name) const {
    // TODO: Things to add here.
    // Validate Preserve/Resolve attachments
    bool skip = false;
    skip |=
        current_renderpass_context_->ValidateNextSubpass(*sync_state_, cb_state_->activeRenderPassBeginInfo.renderArea, func_name);

    return skip;
}

bool CommandBufferAccessContext::ValidateEndRenderpass(const char *func_name) const {
    // TODO: Things to add here.
    // Validate Preserve/Resolve attachments
    bool skip = false;
    skip |= current_renderpass_context_->ValidateFinalSubpassLayoutTransitions(*sync_state_, func_name);

    return skip;
}

void CommandBufferAccessContext::RecordBeginRenderPass(const ResourceUsageTag &tag) {
    assert(sync_state_);
    if (!cb_state_) return;

    // Create an access context for the current renderpass.
    render_pass_contexts_.emplace_back(&cb_access_context_);
    current_renderpass_context_ = &render_pass_contexts_.back();
    current_renderpass_context_->RecordBeginRenderPass(*sync_state_, *cb_state_, queue_flags_, tag);
    current_context_ = &current_renderpass_context_->CurrentContext();
}

void CommandBufferAccessContext::RecordNextSubpass(const RENDER_PASS_STATE &rp_state, const ResourceUsageTag &tag) {
    assert(current_renderpass_context_);
    current_renderpass_context_->RecordNextSubpass(cb_state_->activeRenderPassBeginInfo.renderArea, tag);
    current_context_ = &current_renderpass_context_->CurrentContext();
}

void CommandBufferAccessContext::RecordEndRenderPass(const RENDER_PASS_STATE &render_pass, const ResourceUsageTag &tag) {
    // TODO: Add layout load/store/resolve access (here or in RenderPassContext)
    assert(current_renderpass_context_);
    if (!current_renderpass_context_) return;

    current_renderpass_context_->RecordEndRenderPass(tag);
    current_context_ = &cb_access_context_;
    current_renderpass_context_ = nullptr;
}

bool RenderPassAccessContext::ValidateNextSubpass(const SyncValidator &sync_state, const VkRect2D &render_area,
                                                  const char *func_name) const {
    bool skip = false;
    skip |= CurrentContext().ValidateResolveOperations(sync_state, *rp_state_, render_area, attachment_views_, func_name,
                                                       current_subpass_);
    const auto next_subpass = current_subpass_ + 1;
    const auto &next_context = subpass_contexts_[next_subpass];
    skip |= next_context.ValidateLayoutTransitions(sync_state, *rp_state_, attachment_views_, func_name, next_subpass);
    skip |= next_context.ValidateLoadOperation(sync_state, *rp_state_, render_area, attachment_views_, func_name, next_subpass);
    return skip;
}

bool RenderPassAccessContext::ValidateFinalSubpassLayoutTransitions(const SyncValidator &sync_state, const char *func_name) const {
    bool skip = false;

    // Validate the "finalLayout" transitions to external
    // Get them from where we're hiding them in the extra entry.
    const auto &final_transitions = rp_state_->subpass_transitions.back();
    for (const auto &transition : final_transitions) {
        const auto &attach_view = attachment_views_[transition.attachment];
        const auto &trackback = subpass_contexts_[transition.prev_pass].GetDstExternalTrackBack();
        assert(trackback.context);  // Transitions are given implicit transitions if the StateTracker is working correctly
        auto hazard = trackback.context->DetectImageBarrierHazard(
            *attach_view->image_state, trackback.barrier.src_exec_scope, trackback.barrier.src_access_scope,
            attach_view->normalized_subresource_range, AccessContext::DetectOptions::kDetectPrevious);
        if (hazard.hazard) {
            skip |= sync_state.LogError(rp_state_->renderPass, string_SyncHazardVUID(hazard.hazard),
                                        "%s: Hazard %s with last use subpass %" PRIu32 " for attachment %" PRIu32
                                        " final image layout transition.",
                                        func_name, string_SyncHazard(hazard.hazard), transition.prev_pass, transition.attachment);
        }
    }
    return skip;
}

void RenderPassAccessContext::RecordLayoutTransitions(const ResourceUsageTag &tag) {
    // Add layout transitions...
    const auto &transitions = rp_state_->subpass_transitions[current_subpass_];
    auto &subpass_context = subpass_contexts_[current_subpass_];
    std::set<const IMAGE_VIEW_STATE *> view_seen;
    for (const auto &transition : transitions) {
        const auto attachment_view = attachment_views_[transition.attachment];
        if (!attachment_view) continue;
        const auto image = attachment_view->image_state.get();
        if (!image) continue;

        const auto *barrier = subpass_context.GetTrackBackFromSubpass(transition.prev_pass);
        auto insert_pair = view_seen.insert(attachment_view);
        if (insert_pair.second) {
            // We haven't recorded the transition yet, so treat this as a normal barrier with transition.
            subpass_context.ApplyImageBarrier(*image, barrier->barrier, attachment_view->normalized_subresource_range, true, tag);

        } else {
            // We've recorded the transition, but we need to add on the additional dest barriers, and rerecording the transition
            // would clear out the prior barrier flags, so apply this as a *non* transition barrier
            auto barrier_to_transition = barrier->barrier;
            barrier_to_transition.src_access_scope |= SYNC_IMAGE_LAYOUT_TRANSITION_BIT;
            subpass_context.ApplyImageBarrier(*image, barrier_to_transition, attachment_view->normalized_subresource_range, false,
                                              tag);
        }
    }
}

void RenderPassAccessContext::RecordLoadOperations(const VkRect2D &render_area, const ResourceUsageTag &tag) {
    const auto *attachment_ci = rp_state_->createInfo.pAttachments;
    auto &subpass_context = subpass_contexts_[current_subpass_];
    VkExtent3D extent = CastTo3D(render_area.extent);
    VkOffset3D offset = CastTo3D(render_area.offset);

    for (uint32_t i = 0; i < rp_state_->createInfo.attachmentCount; i++) {
        if (rp_state_->attachment_first_subpass[i] == current_subpass_) {
            if (attachment_views_[i] == nullptr) continue;  // UNUSED
            const auto &view = *attachment_views_[i];
            const IMAGE_STATE *image = view.image_state.get();
            if (image == nullptr) continue;

            const auto &ci = attachment_ci[i];
            const bool has_depth = FormatHasDepth(ci.format);
            const bool has_stencil = FormatHasStencil(ci.format);
            const bool is_color = !(has_depth || has_stencil);

            if (is_color) {
                subpass_context.UpdateAccessState(*image, ColorLoadUsage(ci.loadOp), view.normalized_subresource_range, offset,
                                                  extent, tag);
            } else {
                auto update_range = view.normalized_subresource_range;
                if (has_depth) {
                    update_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
                    subpass_context.UpdateAccessState(*image, DepthStencilLoadUsage(ci.loadOp), update_range, offset, extent, tag);
                }
                if (has_stencil) {
                    update_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
                    subpass_context.UpdateAccessState(*image, DepthStencilLoadUsage(ci.stencilLoadOp), update_range, offset, extent,
                                                      tag);
                }
            }
        }
    }
}

void RenderPassAccessContext::RecordBeginRenderPass(const SyncValidator &state, const CMD_BUFFER_STATE &cb_state,
                                                    VkQueueFlags queue_flags, const ResourceUsageTag &tag) {
    current_subpass_ = 0;
    rp_state_ = cb_state.activeRenderPass;
    subpass_contexts_.reserve(rp_state_->createInfo.subpassCount);
    // Add contexts for all subpasses here so that they exist during next-subpass validation
    for (uint32_t pass = 0; pass < rp_state_->createInfo.subpassCount; pass++) {
        subpass_contexts_.emplace_back(pass, queue_flags, rp_state_->subpass_dependencies, subpass_contexts_, external_context_);
    }
    attachment_views_ = state.GetCurrentAttachmentViews(cb_state);

    RecordLayoutTransitions(tag);
    RecordLoadOperations(cb_state.activeRenderPassBeginInfo.renderArea, tag);
}

void RenderPassAccessContext::RecordNextSubpass(const VkRect2D &render_area, const ResourceUsageTag &tag) {
    current_subpass_++;
    assert(current_subpass_ < subpass_contexts_.size());
    RecordLayoutTransitions(tag);
    RecordLoadOperations(render_area, tag);
}

void RenderPassAccessContext::RecordEndRenderPass(const ResourceUsageTag &tag) {
    // Export the accesses from the renderpass...
    external_context_->ResolveChildContexts(subpass_contexts_);

    // Add the "finalLayout" transitions to external
    // Get them from where we're hiding them in the extra entry.
    const auto &final_transitions = rp_state_->subpass_transitions.back();
    for (const auto &transition : final_transitions) {
        const auto &attachment = attachment_views_[transition.attachment];
        const auto &last_trackback = subpass_contexts_[transition.prev_pass].GetDstExternalTrackBack();
        assert(external_context_ == last_trackback.context);
        external_context_->ApplyImageBarrier(*attachment->image_state, last_trackback.barrier,
                                             attachment->normalized_subresource_range, true, tag);
    }
}

SyncBarrier::SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &barrier) {
    const auto src_stage_mask = ExpandPipelineStages(queue_flags, barrier.srcStageMask);
    src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
    src_access_scope = SyncStageAccess::AccessScope(src_stage_mask, barrier.srcAccessMask);
    const auto dst_stage_mask = ExpandPipelineStages(queue_flags, barrier.dstStageMask);
    dst_exec_scope = WithLaterPipelineStages(dst_stage_mask);
    dst_access_scope = SyncStageAccess::AccessScope(dst_stage_mask, barrier.dstAccessMask);
}
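
// Illustration only (hypothetical masks): for srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
// ExpandPipelineStages resolves any meta-stages for the queue's capabilities, WithEarlierPipelineStages folds the
// logically earlier stages into the source execution scope, and AccessScope keeps only the srcAccessMask bits those
// stages can actually perform; the dst side mirrors this with the logically later stages.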

void ResourceAccessState::ApplyBarrier(const SyncBarrier &barrier) {
    ApplyExecutionBarrier(barrier.src_exec_scope, barrier.dst_exec_scope);
    ApplyMemoryAccessBarrier(barrier.src_exec_scope, barrier.src_access_scope, barrier.dst_exec_scope, barrier.dst_access_scope);
}

HazardResult ResourceAccessState::DetectHazard(SyncStageAccessIndex usage_index) const {
    HazardResult hazard;
    auto usage = FlagBit(usage_index);
    if (IsRead(usage)) {
        if (last_write && IsWriteHazard(usage)) {
            hazard.Set(READ_AFTER_WRITE, write_tag);
        }
    } else {
        // Assume write
        // TODO determine what to do with READ-WRITE usage states if any
        // Write-After-Write check -- if we have a previous write to test against
        if (last_write && IsWriteHazard(usage)) {
            hazard.Set(WRITE_AFTER_WRITE, write_tag);
        } else {
            // Look for casus belli for WAR
            const auto usage_stage = PipelineStageBit(usage_index);
            for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
                if (IsReadHazard(usage_stage, last_reads[read_index])) {
                    hazard.Set(WRITE_AFTER_READ, last_reads[read_index].tag);
                    break;
                }
            }
        }
    }
    return hazard;
}
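
// Illustration only: a transfer write (e.g. vkCmdCopyBuffer) followed by a shader read with no intervening barrier
// leaves last_write set with the read outside the write's barriers, so IsWriteHazard fires and READ_AFTER_WRITE is
// reported against the write's tag.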

HazardResult ResourceAccessState::DetectHazard(SyncStageAccessIndex usage_index, const SyncOrderingBarrier &ordering) const {
    // The ordering guarantees act as barriers to the last accesses, independent of synchronization operations
    HazardResult hazard;
    const auto usage = FlagBit(usage_index);
    const bool write_is_ordered = (last_write & ordering.access_scope) == last_write;  // Is true if no write, and that's good.
    if (IsRead(usage)) {
        if (!write_is_ordered && IsWriteHazard(usage)) {
            hazard.Set(READ_AFTER_WRITE, write_tag);
        }
    } else {
        if (!write_is_ordered && IsWriteHazard(usage)) {
            hazard.Set(WRITE_AFTER_WRITE, write_tag);
        } else {
            const auto usage_stage = PipelineStageBit(usage_index);
            const auto unordered_reads = last_read_stages & ~ordering.exec_scope;
            if (unordered_reads) {
                // Look for any WAR hazards outside the ordered set of stages
                for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
                    if (last_reads[read_index].stage & unordered_reads) {
                        if (IsReadHazard(usage_stage, last_reads[read_index])) {
                            hazard.Set(WRITE_AFTER_READ, last_reads[read_index].tag);
                            break;
                        }
                    }
                }
            }
        }
    }
    return hazard;
}
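
// Note: this overload serves accesses with implicit API ordering guarantees (e.g. raster order for attachment
// operations). A prior write whose bits fall entirely inside ordering.access_scope counts as ordered before this
// usage, and reads in stages inside ordering.exec_scope are excluded from the WAR scan.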

// Asynchronous Hazards occur between subpasses with no connection through the DAG
HazardResult ResourceAccessState::DetectAsyncHazard(SyncStageAccessIndex usage_index) const {
    HazardResult hazard;
    auto usage = FlagBit(usage_index);
    if (IsRead(usage)) {
        if (last_write != 0) {
            hazard.Set(READ_RACING_WRITE, write_tag);
        }
    } else {
        if (last_write != 0) {
            hazard.Set(WRITE_RACING_WRITE, write_tag);
        } else if (last_read_count > 0) {
            hazard.Set(WRITE_RACING_READ, last_reads[0].tag);
        }
    }
    return hazard;
}
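
// Illustration only: two subpasses with no VkSubpassDependency path between them that touch the same attachment
// cannot be ordered by any barrier, so any prior write (or, for a write, any prior read) is reported as a
// *_RACING_* hazard here rather than a barrier-fixable *_AFTER_* hazard.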

HazardResult ResourceAccessState::DetectBarrierHazard(SyncStageAccessIndex usage_index, VkPipelineStageFlags src_exec_scope,
                                                      SyncStageAccessFlags src_access_scope) const {
    // Only supporting image layout transitions for now
    assert(usage_index == SyncStageAccessIndex::SYNC_IMAGE_LAYOUT_TRANSITION);
    HazardResult hazard;
    if (last_write) {
        // If the previous write is *not* in the 1st access scope
        // *AND* the current barrier is not in the dependency chain
        // *AND* there is no prior memory barrier for the previous write in the dependency chain
        // then the barrier access is unsafe (R/W after W)
        if (((last_write & src_access_scope) == 0) && ((src_exec_scope & write_dependency_chain) == 0) && (write_barriers == 0)) {
            // TODO: Do we need a different hazard name for this?
            hazard.Set(WRITE_AFTER_WRITE, write_tag);
        }
    }
    if (!hazard.hazard) {
        // Look at the reads if any
        for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
            const auto &read_access = last_reads[read_index];
            // If the read stage is not in the src sync scope
            // *AND* not execution chained with an existing sync barrier (that's the or)
            // then the barrier access is unsafe (R/W after R)
            if ((src_exec_scope & (read_access.stage | read_access.barriers)) == 0) {
                hazard.Set(WRITE_AFTER_READ, read_access.tag);
                break;
            }
        }
    }
    return hazard;
}
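
// Illustration only (hypothetical barrier): an image barrier transitioning a just-rendered color attachment whose
// srcAccessMask omits COLOR_ATTACHMENT_WRITE leaves last_write outside src_access_scope, so the transition (itself a
// write) is reported as WRITE_AFTER_WRITE against the attachment write's tag.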

// The logic behind resolves is the same as update, we assume that earlier hazards have been reported, and that no
// transitive hazard can exist with a hazard between the earlier operations. Yes, an early hazard can mask that another
// exists, but if you fix *that* hazard it either fixes or unmasks the subsequent ones.
void ResourceAccessState::Resolve(const ResourceAccessState &other) {
    if (write_tag.IsBefore(other.write_tag)) {
        // If this is a later write, we've reported any existing hazard, and we can just overwrite as the more recent operation
        *this = other;
    } else if (!other.write_tag.IsBefore(write_tag)) {
        // This is the *equals* case for write operations, we merge the write barriers and the read state (but without the
        // dependency chaining logic or any stage expansion)
        write_barriers |= other.write_barriers;

        // Merge the read states
        for (uint32_t other_read_index = 0; other_read_index < other.last_read_count; other_read_index++) {
            auto &other_read = other.last_reads[other_read_index];
            if (last_read_stages & other_read.stage) {
                // Merge in the barriers for read stages that exist in *both* this and other
                // TODO: This is N^2 with stages... perhaps the ReadStates should be by stage index.
                for (uint32_t my_read_index = 0; my_read_index < last_read_count; my_read_index++) {
                    auto &my_read = last_reads[my_read_index];
                    if (other_read.stage == my_read.stage) {
                        if (my_read.tag.IsBefore(other_read.tag)) {
                            my_read.tag = other_read.tag;
                        }
                        my_read.barriers |= other_read.barriers;
                        break;
                    }
                }
            } else {
                // The other read stage doesn't exist in this, so add it.
                last_reads[last_read_count] = other_read;
                last_read_count++;
                last_read_stages |= other_read.stage;
            }
        }
    }  // the else clause would be that the other write is before this write... in which case we supersede the other state
       // and ignore it.
}
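
// Illustration only: when two subpass contexts resolved into the parent recorded the same write tag, their read sets
// are unioned per stage (barriers OR'd, latest tag kept); a strictly later write simply replaces the older state,
// since any hazard between the two was reported when the later write was recorded.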

void ResourceAccessState::Update(SyncStageAccessIndex usage_index, const ResourceUsageTag &tag) {
    // Move this logic into the ResourceStateTracker as methods thereof (or we'll repeat it for every flavor of resource...)
    const auto usage_bit = FlagBit(usage_index);
    if (IsRead(usage_index)) {
        // Multiple outstanding reads may be of interest and do dependency chains independently
        // However, for purposes of barrier tracking, only one read per pipeline stage matters
        const auto usage_stage = PipelineStageBit(usage_index);
        if (usage_stage & last_read_stages) {
            for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
                ReadState &access = last_reads[read_index];
                if (access.stage == usage_stage) {
                    access.barriers = 0;
                    access.tag = tag;
                    break;
                }
            }
        } else {
            // We don't have this stage in the list yet...
            assert(last_read_count < last_reads.size());
            ReadState &access = last_reads[last_read_count++];
            access.stage = usage_stage;
            access.barriers = 0;
            access.tag = tag;
            last_read_stages |= usage_stage;
        }
    } else {
        // Assume write
        // TODO determine what to do with READ-WRITE operations if any
        // Clobber last read and both sets of barriers... because all we have is DANGER, DANGER, WILL ROBINSON!!!
        // if the last_reads/last_write were unsafe, we've reported them,
        // in either case the prior access is irrelevant, we can overwrite them as *this* write is now after them
        last_read_count = 0;
        last_read_stages = 0;

        write_barriers = 0;
        write_dependency_chain = 0;
        write_tag = tag;
        last_write = usage_bit;
    }
}

void ResourceAccessState::ApplyExecutionBarrier(VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask) {
    // Execution Barriers only protect read operations
    for (uint32_t read_index = 0; read_index < last_read_count; read_index++) {
        ReadState &access = last_reads[read_index];
        // The | implements the "dependency chain" logic for this access, as the barriers field stores the second sync scope
        if (srcStageMask & (access.stage | access.barriers)) {
            access.barriers |= dstStageMask;
        }
    }
    if (write_dependency_chain & srcStageMask) write_dependency_chain |= dstStageMask;
}
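
// Illustration only (hypothetical chain): after a barrier A -> B, a read at stage A has B recorded in its barriers;
// a second barrier B -> C then matches via (access.stage | access.barriers) and extends the chain to C, so the pair
// protects the read even though no single barrier names both A and C.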

void ResourceAccessState::ApplyMemoryAccessBarrier(VkPipelineStageFlags src_exec_scope, SyncStageAccessFlags src_access_scope,
                                                   VkPipelineStageFlags dst_exec_scope, SyncStageAccessFlags dst_access_scope) {
    // Assuming we've applied the execution side of this barrier, we update just the write
    // The || implements the "dependency chain" logic for this barrier
    if ((src_access_scope & last_write) || (write_dependency_chain & src_exec_scope)) {
        write_barriers |= dst_access_scope;
        write_dependency_chain |= dst_exec_scope;
    }
}
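
// Note: the write picks up the dst scopes either because the barrier's source access scope covers the write itself
// (first clause) or because an earlier barrier already chained the write into src_exec_scope (second clause); reads
// are handled entirely by ApplyExecutionBarrier above.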

void SyncValidator::ResetCommandBufferCallback(VkCommandBuffer command_buffer) {
    auto *access_context = GetAccessContextNoInsert(command_buffer);
    if (access_context) {
        access_context->Reset();
    }
}

void SyncValidator::FreeCommandBufferCallback(VkCommandBuffer command_buffer) {
    auto access_found = cb_access_state.find(command_buffer);
    if (access_found != cb_access_state.end()) {
        access_found->second->Reset();
        cb_access_state.erase(access_found);
    }
}

void SyncValidator::ApplyGlobalBarriers(AccessContext *context, VkPipelineStageFlags srcStageMask,
                                        VkPipelineStageFlags dstStageMask, SyncStageAccessFlags src_access_scope,
                                        SyncStageAccessFlags dst_access_scope, uint32_t memoryBarrierCount,
                                        const VkMemoryBarrier *pMemoryBarriers) {
    // TODO: Implement this better (maybe some delayed/on-demand integration).
    ApplyGlobalBarrierFunctor barriers_functor(srcStageMask, dstStageMask, src_access_scope, dst_access_scope, memoryBarrierCount,
                                               pMemoryBarriers);
    context->ApplyGlobalBarriers(barriers_functor);
}

void SyncValidator::ApplyBufferBarriers(AccessContext *context, VkPipelineStageFlags src_exec_scope,
                                        SyncStageAccessFlags src_stage_accesses, VkPipelineStageFlags dst_exec_scope,
                                        SyncStageAccessFlags dst_stage_accesses, uint32_t barrier_count,
                                        const VkBufferMemoryBarrier *barriers) {
    // TODO Implement this at subresource/memory_range accuracy
    for (uint32_t index = 0; index < barrier_count; index++) {
        auto barrier = barriers[index];
        const auto *buffer = Get<BUFFER_STATE>(barrier.buffer);
        if (!buffer) continue;
        barrier.size = GetRealWholeSize(barrier.offset, barrier.size, buffer->createInfo.size);
        ResourceAccessRange range = MakeRange(barrier);
        const auto src_access_scope = AccessScope(src_stage_accesses, barrier.srcAccessMask);
        const auto dst_access_scope = AccessScope(dst_stage_accesses, barrier.dstAccessMask);
        const ApplyMemoryAccessBarrierFunctor update_action(src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope);
        context->UpdateMemoryAccess(*buffer, range, update_action);
    }
}
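
// Note: the barrier is copied by value above so barrier.size can be rewritten; GetRealWholeSize resolves a
// VK_WHOLE_SIZE size against the buffer's createInfo.size, so MakeRange always sees a concrete extent.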

void SyncValidator::ApplyImageBarriers(AccessContext *context, VkPipelineStageFlags src_exec_scope,
                                       SyncStageAccessFlags src_stage_accesses, VkPipelineStageFlags dst_exec_scope,
                                       SyncStageAccessFlags dst_stage_accesses, uint32_t barrier_count,
                                       const VkImageMemoryBarrier *barriers, const ResourceUsageTag &tag) {
    for (uint32_t index = 0; index < barrier_count; index++) {
        const auto &barrier = barriers[index];
        const auto *image = Get<IMAGE_STATE>(barrier.image);
        if (!image) continue;
        auto subresource_range = NormalizeSubresourceRange(image->createInfo, barrier.subresourceRange);
        bool layout_transition = barrier.oldLayout != barrier.newLayout;
        const auto src_access_scope = AccessScope(src_stage_accesses, barrier.srcAccessMask);
        const auto dst_access_scope = AccessScope(dst_stage_accesses, barrier.dstAccessMask);
        context->ApplyImageBarrier(*image, src_exec_scope, src_access_scope, dst_exec_scope, dst_access_scope, subresource_range,
                                   layout_transition, tag);
    }
}

bool SyncValidator::PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                 uint32_t regionCount, const VkBufferCopy *pRegions) const {
    bool skip = false;
    const auto *cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    if (!cb_context) return skip;
    const auto *context = cb_context->GetCurrentAccessContext();

    // If we have no previous accesses, we have no hazards
    // TODO: make this sub-resource capable
    // TODO: make this general, and stuff it into templates/utility functions
    const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
    const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);

    for (uint32_t region = 0; region < regionCount; region++) {
        const auto &copy_region = pRegions[region];
        if (src_buffer) {
            ResourceAccessRange src_range = MakeRange(
                copy_region.srcOffset, GetRealWholeSize(copy_region.srcOffset, copy_region.size, src_buffer->createInfo.size));
            auto hazard = context->DetectHazard(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range);
            if (hazard.hazard) {
                // TODO -- add tag information to log msg when useful.
                skip |= LogError(srcBuffer, string_SyncHazardVUID(hazard.hazard),
                                 "vkCmdCopyBuffer: Hazard %s for srcBuffer %s, region %" PRIu32, string_SyncHazard(hazard.hazard),
                                 report_data->FormatHandle(srcBuffer).c_str(), region);
            }
        }
        if (dst_buffer && !skip) {
            ResourceAccessRange dst_range = MakeRange(
                copy_region.dstOffset, GetRealWholeSize(copy_region.dstOffset, copy_region.size, dst_buffer->createInfo.size));
            auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range);
            if (hazard.hazard) {
                skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
                                 "vkCmdCopyBuffer: Hazard %s for dstBuffer %s, region %" PRIu32, string_SyncHazard(hazard.hazard),
                                 report_data->FormatHandle(dstBuffer).c_str(), region);
            }
        }
        if (skip) break;
    }
    return skip;
}
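
// Note on the PreCallValidate/PreCallRecord split used for each command below: validation detects hazards against
// the accesses recorded so far without mutating state; the matching Record call then commits this command's reads
// and writes (tagged via NextCommandTag) so later commands are checked against them.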

void SyncValidator::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                               uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto *cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    const auto tag = cb_context->NextCommandTag(CMD_COPYBUFFER);
    auto *context = cb_context->GetCurrentAccessContext();

    const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
    const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);

    for (uint32_t region = 0; region < regionCount; region++) {
        const auto &copy_region = pRegions[region];
        if (src_buffer) {
            ResourceAccessRange src_range = MakeRange(
                copy_region.srcOffset, GetRealWholeSize(copy_region.srcOffset, copy_region.size, src_buffer->createInfo.size));
            context->UpdateAccessState(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range, tag);
        }
        if (dst_buffer) {
            ResourceAccessRange dst_range = MakeRange(
                copy_region.dstOffset, GetRealWholeSize(copy_region.dstOffset, copy_region.size, dst_buffer->createInfo.size));
            context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range, tag);
        }
    }
}

bool SyncValidator::PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                                VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                                const VkImageCopy *pRegions) const {
    bool skip = false;
    const auto *cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    if (!cb_access_context) return skip;

    const auto *context = cb_access_context->GetCurrentAccessContext();
    assert(context);
    if (!context) return skip;

    const auto *src_image = Get<IMAGE_STATE>(srcImage);
    const auto *dst_image = Get<IMAGE_STATE>(dstImage);
    for (uint32_t region = 0; region < regionCount; region++) {
        const auto &copy_region = pRegions[region];
        if (src_image) {
            auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.srcSubresource,
                                                copy_region.srcOffset, copy_region.extent);
            if (hazard.hazard) {
                skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
                                 "vkCmdCopyImage: Hazard %s for srcImage %s, region %" PRIu32, string_SyncHazard(hazard.hazard),
                                 report_data->FormatHandle(srcImage).c_str(), region);
            }
        }

        if (dst_image) {
            VkExtent3D dst_copy_extent =
                GetAdjustedDestImageExtent(src_image->createInfo.format, dst_image->createInfo.format, copy_region.extent);
            auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.dstSubresource,
                                                copy_region.dstOffset, dst_copy_extent);
            if (hazard.hazard) {
                skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
                                 "vkCmdCopyImage: Hazard %s for dstImage %s, region %" PRIu32, string_SyncHazard(hazard.hazard),
                                 report_data->FormatHandle(dstImage).c_str(), region);
            }
            if (skip) break;
        }
    }

    return skip;
}

void SyncValidator::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                              VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                              const VkImageCopy *pRegions) {
    auto *cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    const auto tag = cb_access_context->NextCommandTag(CMD_COPYIMAGE);
    auto *context = cb_access_context->GetCurrentAccessContext();
    assert(context);

    auto *src_image = Get<IMAGE_STATE>(srcImage);
    auto *dst_image = Get<IMAGE_STATE>(dstImage);

    for (uint32_t region = 0; region < regionCount; region++) {
        const auto &copy_region = pRegions[region];
        if (src_image) {
            context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.srcSubresource, copy_region.srcOffset,
                                       copy_region.extent, tag);
        }
        if (dst_image) {
            VkExtent3D dst_copy_extent =
                GetAdjustedDestImageExtent(src_image->createInfo.format, dst_image->createInfo.format, copy_region.extent);
            context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.dstSubresource, copy_region.dstOffset,
                                       dst_copy_extent, tag);
        }
    }
}

bool SyncValidator::PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                                      VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                                      uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                      uint32_t bufferMemoryBarrierCount,
                                                      const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                      uint32_t imageMemoryBarrierCount,
                                                      const VkImageMemoryBarrier *pImageMemoryBarriers) const {
    bool skip = false;
    const auto *cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    if (!cb_access_context) return skip;

    const auto *context = cb_access_context->GetCurrentAccessContext();
    assert(context);
    if (!context) return skip;

    const auto src_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), srcStageMask);
    const auto src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
    auto src_stage_accesses = AccessScopeByStage(src_stage_mask);
    // Validate Image Layout transitions
    for (uint32_t index = 0; index < imageMemoryBarrierCount; index++) {
        const auto &barrier = pImageMemoryBarriers[index];
        if (barrier.newLayout == barrier.oldLayout) continue;  // Only interested in layout transitions at this point.
        const auto *image_state = Get<IMAGE_STATE>(barrier.image);
        if (!image_state) continue;
        const auto hazard = context->DetectImageBarrierHazard(*image_state, src_exec_scope, src_stage_accesses, barrier);
        if (hazard.hazard) {
            // TODO -- add tag information to log msg when useful.
            skip |= LogError(barrier.image, string_SyncHazardVUID(hazard.hazard),
                             "vkCmdPipelineBarrier: Hazard %s for image barrier %" PRIu32 " %s", string_SyncHazard(hazard.hazard),
                             index, report_data->FormatHandle(barrier.image).c_str());
        }
    }

    return skip;
}

void SyncValidator::PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                                    VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                                    uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                    uint32_t bufferMemoryBarrierCount,
                                                    const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                    uint32_t imageMemoryBarrierCount,
                                                    const VkImageMemoryBarrier *pImageMemoryBarriers) {
    auto *cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    if (!cb_access_context) return;
    const auto tag = cb_access_context->NextCommandTag(CMD_PIPELINEBARRIER);
    auto access_context = cb_access_context->GetCurrentAccessContext();
    assert(access_context);
    if (!access_context) return;

    const auto src_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), srcStageMask);
    auto src_stage_accesses = AccessScopeByStage(src_stage_mask);
    const auto dst_stage_mask = ExpandPipelineStages(cb_access_context->GetQueueFlags(), dstStageMask);
    auto dst_stage_accesses = AccessScopeByStage(dst_stage_mask);
    const auto src_exec_scope = WithEarlierPipelineStages(src_stage_mask);
    const auto dst_exec_scope = WithLaterPipelineStages(dst_stage_mask);
    ApplyBufferBarriers(access_context, src_exec_scope, src_stage_accesses, dst_exec_scope, dst_stage_accesses,
                        bufferMemoryBarrierCount, pBufferMemoryBarriers);
    ApplyImageBarriers(access_context, src_exec_scope, src_stage_accesses, dst_exec_scope, dst_stage_accesses,
                       imageMemoryBarrierCount, pImageMemoryBarriers, tag);

    // Apply these last in case their operation is a superset of the other two and would clean them up...
    ApplyGlobalBarriers(access_context, src_exec_scope, dst_exec_scope, src_stage_accesses, dst_stage_accesses, memoryBarrierCount,
                        pMemoryBarriers);
}

void SyncValidator::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                               const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
    // The state tracker sets up the device state
    StateTracker::PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);

    // Add the callback hooks for the functions that are either broadly or deeply used and that the ValidationStateTracker
    // refactor would be messier without.
    // TODO: Find a good way to do this hooklessly.
    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, LayerObjectTypeSyncValidation);
    SyncValidator *sync_device_state = static_cast<SyncValidator *>(validation_data);

    sync_device_state->SetCommandBufferResetCallback([sync_device_state](VkCommandBuffer command_buffer) -> void {
        sync_device_state->ResetCommandBufferCallback(command_buffer);
    });
    sync_device_state->SetCommandBufferFreeCallback([sync_device_state](VkCommandBuffer command_buffer) -> void {
        sync_device_state->FreeCommandBufferCallback(command_buffer);
    });
}

bool SyncValidator::ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                            const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const char *func_name) const {
    bool skip = false;
    const auto rp_state = Get<RENDER_PASS_STATE>(pRenderPassBegin->renderPass);
    auto cb_context = GetAccessContext(commandBuffer);

    if (rp_state && cb_context) {
        skip |= cb_context->ValidateBeginRenderPass(*rp_state, pRenderPassBegin, pSubpassBeginInfo, func_name);
    }

    return skip;
}

bool SyncValidator::PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                                      VkSubpassContents contents) const {
    bool skip = StateTracker::PreCallValidateCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
    auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
    subpass_begin_info.contents = contents;
    skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, &subpass_begin_info, "vkCmdBeginRenderPass");
    return skip;
}

bool SyncValidator::PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                                       const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const {
    bool skip = StateTracker::PreCallValidateCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
    skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, "vkCmdBeginRenderPass2");
    return skip;
}

bool SyncValidator::PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                          const VkRenderPassBeginInfo *pRenderPassBegin,
                                                          const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const {
    bool skip = StateTracker::PreCallValidateCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
    skip |= ValidateBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, "vkCmdBeginRenderPass2KHR");
    return skip;
}

void SyncValidator::PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo,
                                                     VkResult result) {
    // The state tracker sets up the command buffer state
    StateTracker::PostCallRecordBeginCommandBuffer(commandBuffer, pBeginInfo, result);

    // Create/initialize the structure that tracks accesses at the command buffer scope.
    auto cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    cb_access_context->Reset();
}

void SyncValidator::RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                             const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE command) {
    auto cb_context = GetAccessContext(commandBuffer);
    if (cb_context) {
        cb_context->RecordBeginRenderPass(cb_context->NextCommandTag(command));
    }
}

void SyncValidator::PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                                     VkSubpassContents contents) {
    StateTracker::PostCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
    auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
    subpass_begin_info.contents = contents;
    RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, &subpass_begin_info, CMD_BEGINRENDERPASS);
}

void SyncValidator::PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                                      const VkSubpassBeginInfo *pSubpassBeginInfo) {
    StateTracker::PostCallRecordCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
    RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, CMD_BEGINRENDERPASS2);
}

void SyncValidator::PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                         const VkRenderPassBeginInfo *pRenderPassBegin,
                                                         const VkSubpassBeginInfo *pSubpassBeginInfo) {
    StateTracker::PostCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
    RecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, pSubpassBeginInfo, CMD_BEGINRENDERPASS2);
}

bool SyncValidator::ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                           const VkSubpassEndInfoKHR *pSubpassEndInfo, const char *func_name) const {
    bool skip = false;

    auto cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    auto cb_state = cb_context->GetCommandBufferState();
    if (!cb_state) return skip;

    auto rp_state = cb_state->activeRenderPass;
    if (!rp_state) return skip;

    skip |= cb_context->ValidateNextSubpass(func_name);

    return skip;
}

bool SyncValidator::PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const {
    bool skip = StateTracker::PreCallValidateCmdNextSubpass(commandBuffer, contents);
    auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
    subpass_begin_info.contents = contents;
    skip |= ValidateCmdNextSubpass(commandBuffer, &subpass_begin_info, nullptr, "vkCmdNextSubpass");
    return skip;
}

bool SyncValidator::PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                      const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
    bool skip = StateTracker::PreCallValidateCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
    skip |= ValidateCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, "vkCmdNextSubpass2KHR");
    return skip;
}

bool SyncValidator::PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                                   const VkSubpassEndInfo *pSubpassEndInfo) const {
    bool skip = StateTracker::PreCallValidateCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
    skip |= ValidateCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, "vkCmdNextSubpass2");
    return skip;
}

void SyncValidator::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                         const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command) {
    auto cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    auto cb_state = cb_context->GetCommandBufferState();
    if (!cb_state) return;

    auto rp_state = cb_state->activeRenderPass;
    if (!rp_state) return;

    cb_context->RecordNextSubpass(*rp_state, cb_context->NextCommandTag(command));
}

void SyncValidator::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    StateTracker::PostCallRecordCmdNextSubpass(commandBuffer, contents);
    auto subpass_begin_info = lvl_init_struct<VkSubpassBeginInfo>();
    subpass_begin_info.contents = contents;
    RecordCmdNextSubpass(commandBuffer, &subpass_begin_info, nullptr, CMD_NEXTSUBPASS);
}

void SyncValidator::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                                  const VkSubpassEndInfo *pSubpassEndInfo) {
    StateTracker::PostCallRecordCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, CMD_NEXTSUBPASS2);
}

void SyncValidator::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                                                     const VkSubpassEndInfo *pSubpassEndInfo) {
    StateTracker::PostCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo, CMD_NEXTSUBPASS2);
}

bool SyncValidator::ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo,
                                             const char *func_name) const {
    bool skip = false;

    auto cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    auto cb_state = cb_context->GetCommandBufferState();
    if (!cb_state) return skip;

    auto rp_state = cb_state->activeRenderPass;
    if (!rp_state) return skip;

    skip |= cb_context->ValidateEndRenderpass(func_name);
    return skip;
}

bool SyncValidator::PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const {
    bool skip = StateTracker::PreCallValidateCmdEndRenderPass(commandBuffer);
    skip |= ValidateCmdEndRenderPass(commandBuffer, nullptr, "vkCmdEndRenderPass");
    return skip;
}

bool SyncValidator::PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                     const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
    bool skip = StateTracker::PreCallValidateCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
    skip |= ValidateCmdEndRenderPass(commandBuffer, pSubpassEndInfo, "vkCmdEndRenderPass2");
    return skip;
}

bool SyncValidator::PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                        const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
    bool skip = StateTracker::PreCallValidateCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
    skip |= ValidateCmdEndRenderPass(commandBuffer, pSubpassEndInfo, "vkCmdEndRenderPass2KHR");
    return skip;
}

void SyncValidator::RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo,
                                           CMD_TYPE command) {
    // Resolve all the subpass contexts to the command buffer context
    auto cb_context = GetAccessContext(commandBuffer);
    assert(cb_context);
    auto cb_state = cb_context->GetCommandBufferState();
    if (!cb_state) return;

    const auto *rp_state = cb_state->activeRenderPass;
    if (!rp_state) return;

    cb_context->RecordEndRenderPass(*rp_state, cb_context->NextCommandTag(command));
}

void SyncValidator::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    StateTracker::PostCallRecordCmdEndRenderPass(commandBuffer);
    RecordCmdEndRenderPass(commandBuffer, nullptr, CMD_ENDRENDERPASS);
}

void SyncValidator::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) {
    StateTracker::PostCallRecordCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
    RecordCmdEndRenderPass(commandBuffer, pSubpassEndInfo, CMD_ENDRENDERPASS2);
}

void SyncValidator::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) {
    StateTracker::PostCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
    RecordCmdEndRenderPass(commandBuffer, pSubpassEndInfo, CMD_ENDRENDERPASS2);
}

bool SyncValidator::PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                        VkImageLayout dstImageLayout, uint32_t regionCount,
                                                        const VkBufferImageCopy *pRegions) const {
    bool skip = false;
    const auto *cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    if (!cb_access_context) return skip;

    const auto *context = cb_access_context->GetCurrentAccessContext();
    assert(context);
    if (!context) return skip;

    const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
    const auto *dst_image = Get<IMAGE_STATE>(dstImage);

    for (uint32_t region = 0; region < regionCount; region++) {
        const auto &copy_region = pRegions[region];
        if (src_buffer && dst_image) {  // dst_image is needed to compute the buffer range size
            ResourceAccessRange src_range =
                MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, dst_image->createInfo.format));
            auto hazard = context->DetectHazard(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range);
            if (hazard.hazard) {
                // TODO -- add tag information to log msg when useful.
                skip |= LogError(srcBuffer, string_SyncHazardVUID(hazard.hazard),
                                 "vkCmdCopyBufferToImage: Hazard %s for srcBuffer %s, region %" PRIu32,
                                 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcBuffer).c_str(), region);
            }
        }
        if (dst_image) {
            auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.imageSubresource,
                                                copy_region.imageOffset, copy_region.imageExtent);
            if (hazard.hazard) {
                skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
                                 "vkCmdCopyBufferToImage: Hazard %s for dstImage %s, region %" PRIu32,
                                 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstImage).c_str(), region);
            }
        }
        if (skip) break;
    }
    return skip;
}
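
// Illustrative sketch only (excluded from the build): a hypothetical command
// sequence the validation above would flag. The transfer write to 'src' is still
// pending when vkCmdCopyBufferToImage reads it, so DetectHazard on the srcBuffer
// range reports READ_AFTER_WRITE. All handles are assumed to be created and
// bound elsewhere; offsets and sizes are placeholders.
#if 0
static void SketchBufferToImageHazard(VkCommandBuffer cb, VkBuffer staging, VkBuffer src, VkImage image) {
    const VkBufferCopy buffer_copy = {0, 0, 4096};  // writes 'src' on the transfer stage
    vkCmdCopyBuffer(cb, staging, src, 1, &buffer_copy);

    VkBufferImageCopy region = {};
    region.imageSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    region.imageExtent = {32, 32, 1};
    // No barrier between the write above and this read of 'src' -> READ_AFTER_WRITE
    vkCmdCopyBufferToImage(cb, src, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
}
#endif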

void SyncValidator::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                      VkImageLayout dstImageLayout, uint32_t regionCount,
                                                      const VkBufferImageCopy *pRegions) {
    auto *cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    const auto tag = cb_access_context->NextCommandTag(CMD_COPYBUFFERTOIMAGE);
    auto *context = cb_access_context->GetCurrentAccessContext();
    assert(context);

    const auto *src_buffer = Get<BUFFER_STATE>(srcBuffer);
    const auto *dst_image = Get<IMAGE_STATE>(dstImage);

    for (uint32_t region = 0; region < regionCount; region++) {
        const auto &copy_region = pRegions[region];
        if (src_buffer && dst_image) {  // dst_image is needed to compute the buffer range size
            ResourceAccessRange src_range =
                MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, dst_image->createInfo.format));
            context->UpdateAccessState(*src_buffer, SYNC_TRANSFER_TRANSFER_READ, src_range, tag);
        }
        if (dst_image) {
            context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, copy_region.imageSubresource,
                                       copy_region.imageOffset, copy_region.imageExtent, tag);
        }
    }
}

bool SyncValidator::PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                        VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount,
                                                        const VkBufferImageCopy *pRegions) const {
    bool skip = false;
    const auto *cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    if (!cb_access_context) return skip;

    const auto *context = cb_access_context->GetCurrentAccessContext();
    assert(context);
    if (!context) return skip;

    const auto *src_image = Get<IMAGE_STATE>(srcImage);
    const auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);
    const auto dst_mem = (dst_buffer && !dst_buffer->sparse) ? dst_buffer->binding.mem_state->mem : VK_NULL_HANDLE;
    for (uint32_t region = 0; region < regionCount; region++) {
        const auto &copy_region = pRegions[region];
        if (src_image) {
            auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.imageSubresource,
                                                copy_region.imageOffset, copy_region.imageExtent);
            if (hazard.hazard) {
                skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
                                 "vkCmdCopyImageToBuffer: Hazard %s for srcImage %s, region %" PRIu32,
                                 string_SyncHazard(hazard.hazard), report_data->FormatHandle(srcImage).c_str(), region);
            }
        }
        if (dst_mem && src_image) {  // src_image is needed to compute the buffer range size
            ResourceAccessRange dst_range =
                MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, src_image->createInfo.format));
            auto hazard = context->DetectHazard(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range);
            if (hazard.hazard) {
                skip |= LogError(dstBuffer, string_SyncHazardVUID(hazard.hazard),
                                 "vkCmdCopyImageToBuffer: Hazard %s for dstBuffer %s, region %" PRIu32,
                                 string_SyncHazard(hazard.hazard), report_data->FormatHandle(dstBuffer).c_str(), region);
            }
        }
        if (skip) break;
    }
    return skip;
}

void SyncValidator::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                                      VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto *cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    const auto tag = cb_access_context->NextCommandTag(CMD_COPYIMAGETOBUFFER);
    auto *context = cb_access_context->GetCurrentAccessContext();
    assert(context);

    const auto *src_image = Get<IMAGE_STATE>(srcImage);
    auto *dst_buffer = Get<BUFFER_STATE>(dstBuffer);

    for (uint32_t region = 0; region < regionCount; region++) {
        const auto &copy_region = pRegions[region];
        if (src_image) {
            context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, copy_region.imageSubresource,
                                       copy_region.imageOffset, copy_region.imageExtent, tag);
        }
        if (dst_buffer && src_image) {  // src_image is needed to compute the buffer range size
            ResourceAccessRange dst_range =
                MakeRange(copy_region.bufferOffset, GetBufferSizeFromCopyImage(copy_region, src_image->createInfo.format));
            context->UpdateAccessState(*dst_buffer, SYNC_TRANSFER_TRANSFER_WRITE, dst_range, tag);
        }
    }
}
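
// Illustrative sketch only (excluded from the build): a hypothetical barrier that
// satisfies the checks above when srcImage was just written by a transfer. The
// execution and memory dependency (transfer write -> transfer read) closes out the
// pending write, so DetectHazard finds no conflict on the subsequent read.
// Handles, layouts, and sizes are assumptions for the sketch.
#if 0
static void SketchImageToBufferWithBarrier(VkCommandBuffer cb, VkImage src, VkBuffer dst) {
    VkImageMemoryBarrier barrier = {};
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = src;
    barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    vkCmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                         &barrier);

    VkBufferImageCopy region = {};
    region.imageSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    region.imageExtent = {32, 32, 1};
    vkCmdCopyImageToBuffer(cb, src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst, 1, &region);
}
#endif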

bool SyncValidator::PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                                VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                                const VkImageBlit *pRegions, VkFilter filter) const {
    bool skip = false;
    const auto *cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    if (!cb_access_context) return skip;

    const auto *context = cb_access_context->GetCurrentAccessContext();
    assert(context);
    if (!context) return skip;

    const auto *src_image = Get<IMAGE_STATE>(srcImage);
    const auto *dst_image = Get<IMAGE_STATE>(dstImage);

    for (uint32_t region = 0; region < regionCount; region++) {
        const auto &blit_region = pRegions[region];
        if (src_image) {
            VkExtent3D extent = {static_cast<uint32_t>(blit_region.srcOffsets[1].x - blit_region.srcOffsets[0].x),
                                 static_cast<uint32_t>(blit_region.srcOffsets[1].y - blit_region.srcOffsets[0].y),
                                 static_cast<uint32_t>(blit_region.srcOffsets[1].z - blit_region.srcOffsets[0].z)};
            auto hazard = context->DetectHazard(*src_image, SYNC_TRANSFER_TRANSFER_READ, blit_region.srcSubresource,
                                                blit_region.srcOffsets[0], extent);
            if (hazard.hazard) {
                skip |= LogError(srcImage, string_SyncHazardVUID(hazard.hazard),
                                 "vkCmdBlitImage: Hazard %s for srcImage %s, region %" PRIu32, string_SyncHazard(hazard.hazard),
                                 report_data->FormatHandle(srcImage).c_str(), region);
            }
        }

        if (dst_image) {
            VkExtent3D extent = {static_cast<uint32_t>(blit_region.dstOffsets[1].x - blit_region.dstOffsets[0].x),
                                 static_cast<uint32_t>(blit_region.dstOffsets[1].y - blit_region.dstOffsets[0].y),
                                 static_cast<uint32_t>(blit_region.dstOffsets[1].z - blit_region.dstOffsets[0].z)};
            auto hazard = context->DetectHazard(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, blit_region.dstSubresource,
                                                blit_region.dstOffsets[0], extent);
            if (hazard.hazard) {
                skip |= LogError(dstImage, string_SyncHazardVUID(hazard.hazard),
                                 "vkCmdBlitImage: Hazard %s for dstImage %s, region %" PRIu32, string_SyncHazard(hazard.hazard),
                                 report_data->FormatHandle(dstImage).c_str(), region);
            }
        }
        if (skip) break;
    }

    return skip;
}

void SyncValidator::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                              VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                              const VkImageBlit *pRegions, VkFilter filter) {
    auto *cb_access_context = GetAccessContext(commandBuffer);
    assert(cb_access_context);
    const auto tag = cb_access_context->NextCommandTag(CMD_BLITIMAGE);
    auto *context = cb_access_context->GetCurrentAccessContext();
    assert(context);

    auto *src_image = Get<IMAGE_STATE>(srcImage);
    auto *dst_image = Get<IMAGE_STATE>(dstImage);

    for (uint32_t region = 0; region < regionCount; region++) {
        const auto &blit_region = pRegions[region];
        if (src_image) {
            VkExtent3D extent = {static_cast<uint32_t>(blit_region.srcOffsets[1].x - blit_region.srcOffsets[0].x),
                                 static_cast<uint32_t>(blit_region.srcOffsets[1].y - blit_region.srcOffsets[0].y),
                                 static_cast<uint32_t>(blit_region.srcOffsets[1].z - blit_region.srcOffsets[0].z)};
            context->UpdateAccessState(*src_image, SYNC_TRANSFER_TRANSFER_READ, blit_region.srcSubresource,
                                       blit_region.srcOffsets[0], extent, tag);
        }
        if (dst_image) {
            VkExtent3D extent = {static_cast<uint32_t>(blit_region.dstOffsets[1].x - blit_region.dstOffsets[0].x),
                                 static_cast<uint32_t>(blit_region.dstOffsets[1].y - blit_region.dstOffsets[0].y),
                                 static_cast<uint32_t>(blit_region.dstOffsets[1].z - blit_region.dstOffsets[0].z)};
            context->UpdateAccessState(*dst_image, SYNC_TRANSFER_TRANSFER_WRITE, blit_region.dstSubresource,
                                       blit_region.dstOffsets[0], extent, tag);
        }
    }
}