/*
 * Copyright (c) 2019-2022 Valve Corporation
 * Copyright (c) 2019-2022 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: John Zulauf <jzulauf@lunarg.com>
 * Author: Locke Lin <locke@lunarg.com>
 * Author: Jeremy Gebben <jeremyg@lunarg.com>
 */

#pragma once

#include <limits>
#include <memory>
#include <vulkan/vulkan.h>

#include "synchronization_validation_types.h"
#include "state_tracker.h"
#include "cmd_buffer_state.h"
#include "render_pass_state.h"

class AccessContext;
class CommandBufferAccessContext;
class CommandExecutionContext;
class QueueBatchContext;
struct QueueSubmitCmdState;
class RenderPassAccessContext;
class ResourceAccessState;
struct ResourceFirstAccess;
class SyncEventsContext;
struct SyncEventState;
class SyncValidator;

using ImageRangeEncoder = subresource_adapter::ImageRangeEncoder;
using ImageRangeGen = subresource_adapter::ImageRangeGenerator;

using QueueId = uint32_t;

enum SyncHazard {
    NONE = 0,
    READ_AFTER_WRITE,
    WRITE_AFTER_READ,
    WRITE_AFTER_WRITE,
    READ_RACING_WRITE,
    WRITE_RACING_WRITE,
    WRITE_RACING_READ,
};
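// Illustrative mapping of the hazard categories above (hypothetical command sequence, for orientation only):
//   vkCmdCopyBuffer(...);   // write to a buffer range
//   vkCmdDispatch(...);     // shader read of the same range, with no intervening barrier
// With no barrier whose first scope covers the copy's write and whose second scope covers the dispatch's read,
// the read is reported as READ_AFTER_WRITE. The *_RACING_* values are the asynchronous variants, used when the
// two accesses have no submission-order or synchronization relationship at all (e.g. unsynchronized subpasses).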

enum class SyncOrdering : uint8_t {
    kNonAttachment = 0,
    kColorAttachment = 1,
    kDepthStencilAttachment = 2,
    kRaster = 3,
    kNumOrderings = 4,
};

// Useful utilities for manipulating StageAccess parameters, suitable as a base class to save typing
struct SyncStageAccess {
    static inline SyncStageAccessFlags FlagBit(SyncStageAccessIndex stage_access) {
        return syncStageAccessInfoByStageAccessIndex[stage_access].stage_access_bit;
    }
    static inline SyncStageAccessFlags Flags(SyncStageAccessIndex stage_access) {
        return static_cast<SyncStageAccessFlags>(FlagBit(stage_access));
    }

    static bool IsRead(const SyncStageAccessFlags &stage_access_bit) { return (stage_access_bit & syncStageAccessReadMask).any(); }
    static bool IsRead(SyncStageAccessIndex stage_access_index) { return IsRead(FlagBit(stage_access_index)); }

    static bool IsWrite(const SyncStageAccessFlags &stage_access_bit) {
        return (stage_access_bit & syncStageAccessWriteMask).any();
    }
    static bool HasWrite(const SyncStageAccessFlags &stage_access_mask) {
        return (stage_access_mask & syncStageAccessWriteMask).any();
    }
    static bool IsWrite(SyncStageAccessIndex stage_access_index) { return IsWrite(FlagBit(stage_access_index)); }
    static VkPipelineStageFlags2KHR PipelineStageBit(SyncStageAccessIndex stage_access_index) {
        return syncStageAccessInfoByStageAccessIndex[stage_access_index].stage_mask;
    }
    static SyncStageAccessFlags AccessScopeByStage(VkPipelineStageFlags2KHR stages);
    static SyncStageAccessFlags AccessScopeByAccess(VkAccessFlags2KHR access);
    static SyncStageAccessFlags AccessScope(VkPipelineStageFlags2KHR stages, VkAccessFlags2KHR access);
    static SyncStageAccessFlags AccessScope(const SyncStageAccessFlags &stage_scope, VkAccessFlags2KHR accesses) {
        return stage_scope & AccessScopeByAccess(accesses);
    }
};
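// Minimal usage sketch (hypothetical caller; the stage/access values are illustrative only):
//   const SyncStageAccessFlags stage_scope = SyncStageAccess::AccessScopeByStage(VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR);
//   const SyncStageAccessFlags access_scope = SyncStageAccess::AccessScope(stage_scope, VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR);
// access_scope then contains only the stage/access combinations valid for that stage mask, which is how barrier
// first and second access scopes are normalized before hazard checks.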

struct ResourceUsageRecord {
    enum class SubcommandType { kNone, kSubpassTransition, kLoadOp, kStoreOp, kResolveOp, kIndex };

    using TagIndex = size_t;
    using Count = uint32_t;
    constexpr static TagIndex kMaxIndex = std::numeric_limits<TagIndex>::max();
    constexpr static Count kMaxCount = std::numeric_limits<Count>::max();
    CMD_TYPE command = CMD_NONE;
    Count seq_num = 0U;
    SubcommandType sub_command_type = SubcommandType::kNone;
    Count sub_command = 0U;

    // This is somewhat repetitive, but it prevents the need for Exec/Submit time touchup, after which usage records can be
    // from different command buffers and resets.
    const CMD_BUFFER_STATE *cb_state = nullptr;  // plain pointer as a shared pointer is held by the context storing this record
    Count reset_count;

    ResourceUsageRecord() = default;
    ResourceUsageRecord(CMD_TYPE command_, Count seq_num_, SubcommandType sub_type_, Count sub_command_,
                        const CMD_BUFFER_STATE *cb_state_, Count reset_count_)
        : command(command_),
          seq_num(seq_num_),
          sub_command_type(sub_type_),
          sub_command(sub_command_),
          cb_state(cb_state_),
          reset_count(reset_count_) {}
};

// The resource tag index is relative to the command buffer or queue in which it's found
using ResourceUsageTag = ResourceUsageRecord::TagIndex;
using ResourceUsageRange = sparse_container::range<ResourceUsageTag>;

struct HazardResult {
    std::unique_ptr<const ResourceAccessState> access_state;
    std::unique_ptr<const ResourceFirstAccess> recorded_access;
    SyncStageAccessIndex usage_index = std::numeric_limits<SyncStageAccessIndex>::max();
    SyncHazard hazard = NONE;
    SyncStageAccessFlags prior_access = 0U;  // TODO -- change to a NONE enum in ...Bits
    ResourceUsageTag tag = ResourceUsageTag();
    void Set(const ResourceAccessState *access_state_, SyncStageAccessIndex usage_index_, SyncHazard hazard_,
             const SyncStageAccessFlags &prior_, ResourceUsageTag tag_);
    void AddRecordedAccess(const ResourceFirstAccess &first_access);
    bool IsHazard() const { return NONE != hazard; }
};
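// Illustrative consumption pattern (hypothetical caller; usage_index is any SyncStageAccessIndex):
//   const HazardResult hazard = access_state.DetectHazard(usage_index);  // see ResourceAccessState below
//   if (hazard.IsHazard()) {
//       // hazard.hazard names the category, hazard.tag identifies the prior access for error reporting,
//       // and recorded_access (when set) points at the first-access record from a replayed command buffer.
//   }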

struct SyncExecScope {
    VkPipelineStageFlags2KHR mask_param;  // the xxxStageMask parameter passed by the caller
    VkPipelineStageFlags2KHR
        expanded_mask;  // all stage bits covered by any 'catch all bits' in the parameter (e.g. ALL_GRAPHICS_BIT).
    VkPipelineStageFlags2KHR exec_scope;  // all earlier or later stages that would be affected by a barrier using this scope.
    SyncStageAccessFlags valid_accesses;  // all valid accesses that can be used with this scope.

    SyncExecScope() : mask_param(0), expanded_mask(0), exec_scope(0), valid_accesses(0) {}

    static SyncExecScope MakeSrc(VkQueueFlags queue_flags, VkPipelineStageFlags2KHR src_stage_mask,
                                 const VkPipelineStageFlags2KHR disabled_feature_mask = 0);
    static SyncExecScope MakeDst(VkQueueFlags queue_flags, VkPipelineStageFlags2KHR src_stage_mask);
};

struct SyncBarrier {
    struct AllAccess {};
    SyncExecScope src_exec_scope;
    SyncStageAccessFlags src_access_scope;
    SyncExecScope dst_exec_scope;
    SyncStageAccessFlags dst_access_scope;
    SyncBarrier() = default;
    SyncBarrier(const SyncBarrier &other) = default;
    SyncBarrier &operator=(const SyncBarrier &) = default;

    SyncBarrier(const SyncExecScope &src, const SyncExecScope &dst);
    SyncBarrier(const SyncExecScope &src, const SyncExecScope &dst, const AllAccess &);
    SyncBarrier(const SyncExecScope &src_exec, const SyncStageAccessFlags &src_access, const SyncExecScope &dst_exec,
                const SyncStageAccessFlags &dst_access)
        : src_exec_scope(src_exec), src_access_scope(src_access), dst_exec_scope(dst_exec), dst_access_scope(dst_access) {}

    template <typename Barrier>
    SyncBarrier(const Barrier &barrier, const SyncExecScope &src, const SyncExecScope &dst);

    SyncBarrier(VkQueueFlags queue_flags, const VkSubpassDependency2 &barrier);
    // template constructor for sync2 barriers
    template <typename Barrier>
    SyncBarrier(VkQueueFlags queue_flags, const Barrier &barrier);

    void Merge(const SyncBarrier &other) {
        // Note that after merge, only the exec_scope and access_scope fields are fully valid
        // TODO: Do we need to update any of the other fields? Merging has limited application.
        src_exec_scope.exec_scope |= other.src_exec_scope.exec_scope;
        src_access_scope |= other.src_access_scope;
        dst_exec_scope.exec_scope |= other.dst_exec_scope.exec_scope;
        dst_access_scope |= other.dst_access_scope;
    }
};
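// Illustrative construction from a pipeline barrier's parameters (sketch; queue_flags and the masks would come
// from the calling command buffer's state and the barrier structure):
//   const auto src = SyncExecScope::MakeSrc(queue_flags, srcStageMask);
//   const auto dst = SyncExecScope::MakeDst(queue_flags, dstStageMask);
//   const SyncBarrier barrier(src, SyncStageAccess::AccessScope(src.valid_accesses, srcAccessMask),
//                             dst, SyncStageAccess::AccessScope(dst.valid_accesses, dstAccessMask));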

enum class AccessAddressType : uint32_t { kLinear = 0, kIdealized = 1, kMaxType = 1, kTypeCount = kMaxType + 1 };

struct SemaphoreScope : SyncExecScope {
    SemaphoreScope(QueueId qid, const SyncExecScope &exec_scope) : SyncExecScope(exec_scope), queue(qid) {}
    SemaphoreScope() = default;
    QueueId queue;
};

class SignaledSemaphores {
  public:
    // The record of a signaled semaphore, deleted when unsignaled
    struct Signal {
        Signal() = delete;
        Signal(const Signal &other) = default;
        Signal(Signal &&other) = default;
        Signal &operator=(const Signal &other) = default;
        Signal &operator=(Signal &&other) = default;
        Signal(const std::shared_ptr<const SEMAPHORE_STATE> &sem_state_, const std::shared_ptr<QueueBatchContext> &batch_,
               const SyncExecScope &exec_scope_);

        std::shared_ptr<const SEMAPHORE_STATE> sem_state;
        std::shared_ptr<QueueBatchContext> batch;
        // Use the SyncExecScope::valid_accesses for first access scope
        SemaphoreScope first_scope;
        // TODO add timeline semaphore support.
    };
    using SignalMap = layer_data::unordered_map<VkSemaphore, std::shared_ptr<Signal>>;
    using iterator = SignalMap::iterator;
    using const_iterator = SignalMap::const_iterator;
    using mapped_type = SignalMap::mapped_type;
    iterator begin() { return signaled_.begin(); }
    const_iterator begin() const { return signaled_.begin(); }
    iterator end() { return signaled_.end(); }
    const_iterator end() const { return signaled_.end(); }

    bool SignalSemaphore(const std::shared_ptr<const SEMAPHORE_STATE> &sem_state, const std::shared_ptr<QueueBatchContext> &batch,
                         const VkSemaphoreSubmitInfo &signal_info);
    std::shared_ptr<const Signal> Unsignal(VkSemaphore);
    void Import(VkSemaphore sem, std::shared_ptr<Signal> &&move_from);
    void Reset();
    SignaledSemaphores() : prev_(nullptr) {}
    SignaledSemaphores(const SignaledSemaphores &prev) : prev_(&prev) {}

  private:
    std::shared_ptr<const Signal> GetPrev(VkSemaphore sem) const;
    layer_data::unordered_map<VkSemaphore, std::shared_ptr<Signal>> signaled_;
    const SignaledSemaphores *prev_;  // Allowing this type to act as a writable overlay
};
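// Illustrative flow (hypothetical caller at queue submit time): one batch signals, a later batch waits:
//   signaled.SignalSemaphore(sem_state, signaling_batch, signal_info);  // record the signal and its first scope
//   auto signal = signaled.Unsignal(semaphore);                         // consume the record when a batch waits
// Constructing a SignaledSemaphores from a previous instance makes it a writable overlay, so speculative
// submit-time validation can consume signals without mutating the device-level state.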

struct ResourceFirstAccess {
    ResourceUsageTag tag;
    SyncStageAccessIndex usage_index;
    SyncOrdering ordering_rule;
    ResourceFirstAccess(ResourceUsageTag tag_, SyncStageAccessIndex usage_index_, SyncOrdering ordering_rule_)
        : tag(tag_), usage_index(usage_index_), ordering_rule(ordering_rule_){};
    ResourceFirstAccess(const ResourceFirstAccess &other) = default;
    ResourceFirstAccess(ResourceFirstAccess &&other) = default;
    ResourceFirstAccess &operator=(const ResourceFirstAccess &rhs) = default;
    ResourceFirstAccess &operator=(ResourceFirstAccess &&rhs) = default;
    bool operator==(const ResourceFirstAccess &rhs) const {
        return (tag == rhs.tag) && (usage_index == rhs.usage_index) && (ordering_rule == rhs.ordering_rule);
    }
};

using QueueId = uint32_t;
class ResourceAccessState : public SyncStageAccess {
  protected:
    struct OrderingBarrier {
        VkPipelineStageFlags2KHR exec_scope;
        SyncStageAccessFlags access_scope;
        OrderingBarrier() = default;
        OrderingBarrier(const OrderingBarrier &) = default;
        OrderingBarrier(VkPipelineStageFlags2KHR es, SyncStageAccessFlags as) : exec_scope(es), access_scope(as) {}
        OrderingBarrier &operator=(const OrderingBarrier &) = default;
        OrderingBarrier &operator|=(const OrderingBarrier &rhs) {
            exec_scope |= rhs.exec_scope;
            access_scope |= rhs.access_scope;
            return *this;
        }
    };
    using OrderingBarriers = std::array<OrderingBarrier, static_cast<size_t>(SyncOrdering::kNumOrderings)>;
    using FirstAccesses = small_vector<ResourceFirstAccess, 3>;

    // Multiple read operations can be simultaneously (and independently) synchronized.
    // Given that only the second execution scope creates a dependency chain, we have to track each of them,
    // but only up to one per pipeline stage (as another read from the *same* stage becomes the more recent,
    // and applicable, one for hazard detection).
    struct ReadState {
        VkPipelineStageFlags2KHR stage;  // The stage of this read
        SyncStageAccessFlags access;     // TODO: Change to FlagBits when we have a None bit enum
        // TODO: Revisit whether this needs to support multiple reads per stage
        VkPipelineStageFlags2KHR barriers;     // all applicable barriered stages
        VkPipelineStageFlags2KHR sync_stages;  // reads known to have happened after this
        ResourceUsageTag tag;
        QueueId queue;
        VkPipelineStageFlags2KHR pending_dep_chain;  // Should be zero except during barrier application
                                                     // Excluded from comparison
        ReadState() = default;
        ReadState(VkPipelineStageFlags2KHR stage_, SyncStageAccessFlags access_, VkPipelineStageFlags2KHR barriers_,
                  ResourceUsageTag tag_);
        bool operator==(const ReadState &rhs) const {
            bool same = (stage == rhs.stage) && (access == rhs.access) && (barriers == rhs.barriers) && (tag == rhs.tag);
            return same;
        }
        bool IsReadBarrierHazard(VkPipelineStageFlags2KHR src_exec_scope) const {
            // If the read stage is not in the src sync scope
            // *AND* not execution chained with an existing sync barrier (that's the or)
            // then the barrier access is unsafe (R/W after R)
            return (src_exec_scope & (stage | barriers)) == 0;
        }
        bool IsReadBarrierHazard(QueueId barrier_queue, VkPipelineStageFlags2KHR src_exec_scope) const {
            // If the read stage is not in the src sync scope
            // *AND* not execution chained with an existing sync barrier (that's the or)
            // then the barrier access is unsafe (R/W after R)
            VkPipelineStageFlags2 queue_ordered_stage = (queue == barrier_queue) ? stage : VK_PIPELINE_STAGE_2_NONE;
            return (src_exec_scope & (queue_ordered_stage | barriers)) == 0;
        }

        bool operator!=(const ReadState &rhs) const { return !(*this == rhs); }
        void Set(VkPipelineStageFlags2KHR stage_, const SyncStageAccessFlags &access_, VkPipelineStageFlags2KHR barriers_,
                 ResourceUsageTag tag_);
        bool ReadInScopeOrChain(VkPipelineStageFlags2 exec_scope) const { return (exec_scope & (stage | barriers)) != 0; }
        bool ReadInQueueScopeOrChain(QueueId queue, VkPipelineStageFlags2 exec_scope) const;
        bool ReadInEventScope(VkPipelineStageFlags2 exec_scope, QueueId scope_queue, ResourceUsageTag scope_tag) const {
            // If this read is the same one we included in the set event and in scope, then apply the execution barrier...
            // NOTE: That's not really correct... this read stage might *not* have been included in the setevent, and the barriers
            // representing the chain might have changed since then (that would be an odd usage), so as a first approximation
            // we'll assume the barriers *haven't* been changed since (if the tag hasn't), and while this could be a false
            // positive in the case of Set; SomeBarrier; Wait; we'll live with it until we can add more state to the first scope
            // capture (the specific write and read stages that *were* in scope at the moment of SetEvents).
            return (tag < scope_tag) && ReadInQueueScopeOrChain(scope_queue, exec_scope);
        }
    };

  public:
    HazardResult DetectHazard(SyncStageAccessIndex usage_index) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, SyncOrdering ordering_rule, QueueId queue_id) const;
    HazardResult DetectHazard(SyncStageAccessIndex usage_index, const OrderingBarrier &ordering, QueueId queue_id) const;
    HazardResult DetectHazard(const ResourceAccessState &recorded_use, QueueId queue_id, const ResourceUsageRange &tag_range) const;

    HazardResult DetectAsyncHazard(SyncStageAccessIndex usage_index, ResourceUsageTag start_tag) const;
    HazardResult DetectAsyncHazard(const ResourceAccessState &recorded_use, const ResourceUsageRange &tag_range,
                                   ResourceUsageTag start_tag) const;

    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, QueueId queue_id, VkPipelineStageFlags2KHR source_exec_scope,
                                     const SyncStageAccessFlags &source_access_scope) const;
    HazardResult DetectBarrierHazard(SyncStageAccessIndex usage_index, const ResourceAccessState &scope_state,
                                     VkPipelineStageFlags2KHR source_exec_scope, const SyncStageAccessFlags &source_access_scope,
                                     QueueId event_queue, ResourceUsageTag event_tag) const;

    void Update(SyncStageAccessIndex usage_index, SyncOrdering ordering_rule, ResourceUsageTag tag);
    void SetWrite(const SyncStageAccessFlags &usage_bit, ResourceUsageTag tag);
    void ClearWrite();
    void ClearRead();
    void Resolve(const ResourceAccessState &other);
    void ApplyBarriers(const std::vector<SyncBarrier> &barriers, bool layout_transition);
    void ApplyBarriersImmediate(const std::vector<SyncBarrier> &barriers);
    template <typename ScopeOps>
    void ApplyBarrier(ScopeOps &&scope, const SyncBarrier &barrier, bool layout_transition);
    void ApplyPendingBarriers(ResourceUsageTag tag);
    void ApplySemaphore(const SemaphoreScope &signal, const SemaphoreScope wait);

    struct QueueTagPredicate {
        QueueId queue;
        ResourceUsageTag tag;
        bool operator()(QueueId usage_queue, ResourceUsageTag usage_tag) const;
    };

    struct QueuePredicate {
        QueueId queue;
        QueuePredicate(QueueId queue_) : queue(queue_) {}
        bool operator()(QueueId usage_queue, ResourceUsageTag usage_tag) const;
    };
    struct TagPredicate {
        ResourceUsageTag tag;
        bool operator()(QueueId usage_queue, ResourceUsageTag usage_tag) const;
    };

    template <typename Pred>
    bool ApplyQueueTagWait(Pred &&);
    bool FirstAccessInTagRange(const ResourceUsageRange &tag_range) const;

    void OffsetTag(ResourceUsageTag offset);
    ResourceAccessState();

    bool HasPendingState() const {
        return (0 != pending_layout_transition) || pending_write_barriers.any() || (0 != pending_write_dep_chain);
    }
    bool HasWriteOp() const { return last_write != 0; }
    bool operator==(const ResourceAccessState &rhs) const {
        bool same = (write_barriers == rhs.write_barriers) && (write_dependency_chain == rhs.write_dependency_chain) &&
                    (last_reads == rhs.last_reads) && (last_read_stages == rhs.last_read_stages) && (write_tag == rhs.write_tag) &&
                    (input_attachment_read == rhs.input_attachment_read) &&
                    (read_execution_barriers == rhs.read_execution_barriers) && (first_accesses_ == rhs.first_accesses_);
        return same;
    }
    bool operator!=(const ResourceAccessState &rhs) const { return !(*this == rhs); }
    VkPipelineStageFlags2KHR GetReadBarriers(const SyncStageAccessFlags &usage) const;
    SyncStageAccessFlags GetWriteBarriers() const { return write_barriers; }
    bool InSourceScopeOrChain(VkPipelineStageFlags2KHR src_exec_scope, SyncStageAccessFlags src_access_scope) const {
        return ReadInSourceScopeOrChain(src_exec_scope) || WriteInSourceScopeOrChain(src_exec_scope, src_access_scope);
    }
    void SetQueueId(QueueId id);

    bool WriteInChain(VkPipelineStageFlags2KHR src_exec_scope) const;
    bool WriteInScope(const SyncStageAccessFlags &src_access_scope) const;
    bool WriteBarrierInScope(const SyncStageAccessFlags &src_access_scope) const;
    bool WriteInChainedScope(VkPipelineStageFlags2KHR src_exec_scope, const SyncStageAccessFlags &src_access_scope) const;
    bool WriteInSourceScopeOrChain(VkPipelineStageFlags2KHR src_exec_scope, SyncStageAccessFlags src_access_scope) const;
    bool WriteInQueueSourceScopeOrChain(QueueId queue, VkPipelineStageFlags2KHR src_exec_scope,
                                        SyncStageAccessFlags src_access_scope) const;
    bool WriteInEventScope(VkPipelineStageFlags2KHR src_exec_scope, const SyncStageAccessFlags &src_access_scope,
                           QueueId scope_queue, ResourceUsageTag scope_tag) const;

    struct UntaggedScopeOps {
        bool WriteInScope(const SyncBarrier &barrier, const ResourceAccessState &access) const {
            return access.WriteInSourceScopeOrChain(barrier.src_exec_scope.exec_scope, barrier.src_access_scope);
        }
        bool ReadInScope(const SyncBarrier &barrier, const ReadState &read_state) const {
            return read_state.ReadInScopeOrChain(barrier.src_exec_scope.exec_scope);
        }
    };

    struct QueueScopeOps {
        bool WriteInScope(const SyncBarrier &barrier, const ResourceAccessState &access) const {
            return access.WriteInQueueSourceScopeOrChain(queue, barrier.src_exec_scope.exec_scope, barrier.src_access_scope);
        }
        bool ReadInScope(const SyncBarrier &barrier, const ReadState &read_state) const {
            return read_state.ReadInQueueScopeOrChain(queue, barrier.src_exec_scope.exec_scope);
        }
        QueueScopeOps(QueueId scope_queue) : queue(scope_queue) {}
        QueueId queue;
    };

    struct EventScopeOps {
        bool WriteInScope(const SyncBarrier &barrier, const ResourceAccessState &access) const {
            return access.WriteInEventScope(barrier.src_exec_scope.exec_scope, barrier.src_access_scope, scope_queue, scope_tag);
        }
        bool ReadInScope(const SyncBarrier &barrier, const ReadState &read_state) const {
            return read_state.ReadInEventScope(barrier.src_exec_scope.exec_scope, scope_queue, scope_tag);
        }
        EventScopeOps(QueueId qid, ResourceUsageTag event_tag) : scope_queue(qid), scope_tag(event_tag) {}
        QueueId scope_queue;
        ResourceUsageTag scope_tag;
    };

  private:
    static constexpr VkPipelineStageFlags2KHR kInvalidAttachmentStage = ~VkPipelineStageFlags2KHR(0);
    bool IsWriteHazard(SyncStageAccessFlags usage) const { return (usage & ~write_barriers).any(); }
    bool IsRAWHazard(VkPipelineStageFlags2KHR usage_stage, const SyncStageAccessFlags &usage) const;

    // This form is only valid when queue submit order is known...
    bool IsWriteBarrierHazard(VkPipelineStageFlags2KHR src_exec_scope, const SyncStageAccessFlags &src_access_scope) const {
        // If the previous write is *not* a layout transition
        // *AND* is *not* in the 1st access scope
        // *AND* the current barrier is not in the dependency chain
        // *AND* there is no prior memory barrier for the previous write in the dependency chain
        // then the barrier access is unsafe (R/W after W)
        return (last_write != SYNC_IMAGE_LAYOUT_TRANSITION_BIT) && !WriteInScope(src_access_scope) &&
               !WriteInChainedScope(src_exec_scope, src_access_scope);
    }

    bool IsWriteBarrierHazard(QueueId queue_id, VkPipelineStageFlags2KHR src_exec_scope,
                              const SyncStageAccessFlags &src_access_scope) const {
        if (queue_id == write_queue) {
            return IsWriteBarrierHazard(src_exec_scope, src_access_scope);
        }
        // Accesses with queue submit or...
        // If the last access is a layout transition, then exec_scope is all that is needed, otherwise access scope is needed
        if (last_write == SYNC_IMAGE_LAYOUT_TRANSITION_BIT) {
            return !WriteInChain(src_exec_scope);
        }
        return !WriteInChainedScope(src_exec_scope, src_access_scope);
    }
    bool ReadInSourceScopeOrChain(VkPipelineStageFlags2KHR src_exec_scope) const {
        return (0 != (src_exec_scope & (last_read_stages | read_execution_barriers)));
    }

    static bool IsReadHazard(VkPipelineStageFlags2KHR stage_mask, const VkPipelineStageFlags2KHR barriers) {
        return stage_mask != (stage_mask & barriers);
    }

    bool IsReadHazard(VkPipelineStageFlags2KHR stage_mask, const ReadState &read_access) const {
        return IsReadHazard(stage_mask, read_access.barriers);
    }
    VkPipelineStageFlags2 GetOrderedStages(QueueId queue_id, const OrderingBarrier &ordering) const;

    void UpdateFirst(ResourceUsageTag tag, SyncStageAccessIndex usage_index, SyncOrdering ordering_rule);
    void TouchupFirstForLayoutTransition(ResourceUsageTag tag, const OrderingBarrier &layout_ordering);

    static const OrderingBarrier &GetOrderingRules(SyncOrdering ordering_enum) {
        return kOrderingRules[static_cast<size_t>(ordering_enum)];
    }

    // TODO: Add a NONE (zero) enum to SyncStageAccessFlags for input_attachment_read and last_write

    // With reads, each must be "safe" relative to its prior write, so we need only
    // save the most recent write operation (as anything *transitively* unsafe would already
    // be included).
    SyncStageAccessFlags write_barriers;              // union of applicable barrier masks since last write
    VkPipelineStageFlags2KHR write_dependency_chain;  // initially zero, but accumulating the dstStages of barriers if they chain.
    ResourceUsageTag write_tag;
    QueueId write_queue;
    SyncStageAccessFlags last_write;  // only the most recent write

    // TODO Input Attachment cleanup for multiple reads in a given stage
    // Tracks whether the fragment shader read is input attachment read
    bool input_attachment_read;

    VkPipelineStageFlags2KHR last_read_stages;
    VkPipelineStageFlags2KHR read_execution_barriers;
    using ReadStates = small_vector<ReadState, 3, uint32_t>;
    ReadStates last_reads;

    // Pending execution state to support independent parallel barriers
    VkPipelineStageFlags2KHR pending_write_dep_chain;
    bool pending_layout_transition;
    SyncStageAccessFlags pending_write_barriers;
    OrderingBarrier pending_layout_ordering_;
    FirstAccesses first_accesses_;
    VkPipelineStageFlags2KHR first_read_stages_;
    OrderingBarrier first_write_layout_ordering_;

    static OrderingBarriers kOrderingRules;
};
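// Barrier application above is two-phase (a minimal sketch of the intended call sequence, using the declared API):
// ApplyBarrier() accumulates pending state, and a subsequent ApplyPendingBarriers() commits it, so multiple barriers
// issued by one command apply independently against the pre-barrier state rather than chaining with each other:
//   access_state.ApplyBarrier(ResourceAccessState::UntaggedScopeOps(), sync_barrier, false /* layout_transition */);
//   access_state.ApplyPendingBarriers(tag);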
using ResourceAccessStateFunction = std::function<void(ResourceAccessState *)>;
using ResourceAccessStateConstFunction = std::function<void(const ResourceAccessState &)>;

using ResourceAddress = VkDeviceSize;
using ResourceAccessRangeMap = sparse_container::range_map<ResourceAddress, ResourceAccessState>;
using ResourceAccessRange = typename ResourceAccessRangeMap::key_type;
using ResourceAccessRangeIndex = typename ResourceAccessRange::index_type;
using ResourceRangeMergeIterator = sparse_container::parallel_iterator<ResourceAccessRangeMap, const ResourceAccessRangeMap>;

struct FenceSyncState {
    std::shared_ptr<const FENCE_STATE> fence;
    ResourceUsageTag tag;
    QueueId queue_id;
    FenceSyncState();
    FenceSyncState(const FenceSyncState &other) = default;
    FenceSyncState(FenceSyncState &&other) = default;
    FenceSyncState &operator=(const FenceSyncState &other) = default;
    FenceSyncState &operator=(FenceSyncState &&other) = default;

    FenceSyncState(const std::shared_ptr<const FENCE_STATE> &fence_, ResourceUsageTag tag_, QueueId queue_id_)
        : fence(fence_), tag(tag_), queue_id(queue_id_) {}
};

class AttachmentViewGen {
  public:
    enum Gen { kViewSubresource = 0, kRenderArea = 1, kDepthOnlyRenderArea = 2, kStencilOnlyRenderArea = 3, kGenSize = 4 };
    AttachmentViewGen(const IMAGE_VIEW_STATE *view_, const VkOffset3D &offset, const VkExtent3D &extent);
    AttachmentViewGen(const AttachmentViewGen &other) = default;
    AttachmentViewGen(AttachmentViewGen &&other) = default;
    AccessAddressType GetAddressType() const;
    const IMAGE_VIEW_STATE *GetViewState() const { return view_; }
    const ImageRangeGen *GetRangeGen(Gen type) const;
    bool IsValid() const { return gen_store_[Gen::kViewSubresource]; }
    Gen GetDepthStencilRenderAreaGenType(bool depth_op, bool stencil_op) const;

  private:
    using RangeGenStore = layer_data::optional<ImageRangeGen>;
    const IMAGE_VIEW_STATE *view_ = nullptr;
    VkImageAspectFlags view_mask_ = 0U;
    std::array<RangeGenStore, Gen::kGenSize> gen_store_;
};

using AttachmentViewGenVector = std::vector<AttachmentViewGen>;

using SyncMemoryBarrier = SyncBarrier;
struct SyncBufferMemoryBarrier {
    using Buffer = std::shared_ptr<const BUFFER_STATE>;
    Buffer buffer;
    SyncBarrier barrier;
    ResourceAccessRange range;
    bool IsLayoutTransition() const { return false; }
    const ResourceAccessRange &Range() const { return range; };
    const BUFFER_STATE *GetState() const { return buffer.get(); }
    SyncBufferMemoryBarrier(const Buffer &buffer_, const SyncBarrier &barrier_, const ResourceAccessRange &range_)
        : buffer(buffer_), barrier(barrier_), range(range_) {}
    SyncBufferMemoryBarrier() = default;
};

struct SyncImageMemoryBarrier {
    using Image = std::shared_ptr<const IMAGE_STATE>;

    Image image;
    uint32_t index;
    SyncBarrier barrier;
    VkImageLayout old_layout;
    VkImageLayout new_layout;
    VkImageSubresourceRange range;

    bool IsLayoutTransition() const { return old_layout != new_layout; }
    const VkImageSubresourceRange &Range() const { return range; };
    const IMAGE_STATE *GetState() const { return image.get(); }
    SyncImageMemoryBarrier(const Image &image_, uint32_t index_, const SyncBarrier &barrier_, VkImageLayout old_layout_,
                           VkImageLayout new_layout_, const VkImageSubresourceRange &subresource_range_)
        : image(image_),
          index(index_),
          barrier(barrier_),
          old_layout(old_layout_),
          new_layout(new_layout_),
          range(subresource_range_) {}
    SyncImageMemoryBarrier() = default;
};

template <typename SubpassNode>
struct SubpassBarrierTrackback {
    std::vector<SyncBarrier> barriers;
    const SubpassNode *source_subpass = nullptr;
    SubpassBarrierTrackback() = default;
    SubpassBarrierTrackback(const SubpassBarrierTrackback &) = default;
    SubpassBarrierTrackback(const SubpassNode *source_subpass_, VkQueueFlags queue_flags_,
                            const std::vector<const VkSubpassDependency2 *> &subpass_dependencies_)
        : barriers(), source_subpass(source_subpass_) {
        barriers.reserve(subpass_dependencies_.size());
        for (const VkSubpassDependency2 *dependency : subpass_dependencies_) {
            assert(dependency);
            barriers.emplace_back(queue_flags_, *dependency);
        }
    }
    SubpassBarrierTrackback(const SubpassNode *source_subpass_, const SyncBarrier &barrier_)
        : barriers(1, barrier_), source_subpass(source_subpass_) {}
    SubpassBarrierTrackback &operator=(const SubpassBarrierTrackback &) = default;
};

class SyncOpBase {
  public:
    SyncOpBase() : cmd_type_(CMD_NONE) {}
    SyncOpBase(CMD_TYPE cmd_type) : cmd_type_(cmd_type) {}
    virtual ~SyncOpBase() = default;

    const char *CmdName() const { return CommandTypeString(cmd_type_); }

    virtual bool Validate(const CommandBufferAccessContext &cb_context) const = 0;
    virtual ResourceUsageTag Record(CommandBufferAccessContext *cb_context) = 0;
    virtual bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                                ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const = 0;
    virtual void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const = 0;

  protected:
    // Only non-null and valid for SyncOps within a render pass instance  WIP -- think about how to manage for non RPI calls within
    // RPI and 2ndarys...
    uint32_t subpass_ = VK_SUBPASS_EXTERNAL;
    CMD_TYPE cmd_type_;
};
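// Sketch of the intended SyncOp call sequence (inferred from the interface above; op, contexts, and tags are hypothetical):
//   bool skip = op.Validate(cb_access_context);                           // validated when the vkCmd* is recorded
//   const ResourceUsageTag recorded_tag = op.Record(&cb_access_context);  // updates the command buffer's access state
//   // later, when the command buffer is executed or submitted, the op is replayed against that context:
//   skip |= op.ReplayValidate(recorded_tag, recorded_context, base_tag, exec_context);
//   op.ReplayRecord(exec_context, base_tag + recorded_tag);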

class SyncOpBarriers : public SyncOpBase {
  protected:
    template <typename Barriers, typename FunctorFactory>
    static void ApplyBarriers(const Barriers &barriers, const FunctorFactory &factory, QueueId queue_id, ResourceUsageTag tag,
                              AccessContext *context);
    template <typename Barriers, typename FunctorFactory>
    static void ApplyGlobalBarriers(const Barriers &barriers, const FunctorFactory &factory, QueueId queue_id, ResourceUsageTag tag,
                                    AccessContext *access_context);

    SyncOpBarriers(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, VkPipelineStageFlags srcStageMask,
                   VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
                   const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                   const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                   const VkImageMemoryBarrier *pImageMemoryBarriers);
    SyncOpBarriers(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, uint32_t event_count,
                   const VkDependencyInfoKHR *pDependencyInfo);

    ~SyncOpBarriers() override = default;

  protected:
    struct BarrierSet {
        VkDependencyFlags dependency_flags;
        SyncExecScope src_exec_scope;
        SyncExecScope dst_exec_scope;
        std::vector<SyncMemoryBarrier> memory_barriers;
        std::vector<SyncBufferMemoryBarrier> buffer_memory_barriers;
        std::vector<SyncImageMemoryBarrier> image_memory_barriers;
        bool single_exec_scope;
        void MakeMemoryBarriers(const SyncExecScope &src, const SyncExecScope &dst, VkDependencyFlags dependencyFlags,
                                uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers);
        void MakeBufferMemoryBarriers(const SyncValidator &sync_state, const SyncExecScope &src, const SyncExecScope &dst,
                                      VkDependencyFlags dependencyFlags, uint32_t bufferMemoryBarrierCount,
                                      const VkBufferMemoryBarrier *pBufferMemoryBarriers);
        void MakeImageMemoryBarriers(const SyncValidator &sync_state, const SyncExecScope &src, const SyncExecScope &dst,
                                     VkDependencyFlags dependencyFlags, uint32_t imageMemoryBarrierCount,
                                     const VkImageMemoryBarrier *pImageMemoryBarriers);
        void MakeMemoryBarriers(VkQueueFlags queue_flags, VkDependencyFlags dependency_flags, uint32_t barrier_count,
                                const VkMemoryBarrier2 *barriers);
        void MakeBufferMemoryBarriers(const SyncValidator &sync_state, VkQueueFlags queue_flags, VkDependencyFlags dependency_flags,
                                      uint32_t barrier_count, const VkBufferMemoryBarrier2 *barriers);
        void MakeImageMemoryBarriers(const SyncValidator &sync_state, VkQueueFlags queue_flags, VkDependencyFlags dependency_flags,
                                     uint32_t barrier_count, const VkImageMemoryBarrier2 *barriers);
    };
    std::vector<BarrierSet> barriers_;
};
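
// Each BarrierSet above appears to collect the converted memory/buffer/image barriers for one dependency -- either the
// legacy srcStageMask/dstStageMask plus barrier arrays, or one VkDependencyInfo(KHR) per waited event on the
// synchronization2 path -- so barriers_ seems to hold one entry per dependency supplied at construction.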

class SyncOpPipelineBarrier : public SyncOpBarriers {
  public:
    SyncOpPipelineBarrier(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags,
                          VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                          uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                          const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                          const VkImageMemoryBarrier *pImageMemoryBarriers);
    SyncOpPipelineBarrier(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags,
                          const VkDependencyInfoKHR &pDependencyInfo);
    ~SyncOpPipelineBarrier() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;
};
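
// Sketch of typical usage (presumably from the vkCmdPipelineBarrier / vkCmdPipelineBarrier2 intercepts; variable names
// and the CMD_TYPE value shown are illustrative, not part of this header):
//
//   SyncOpPipelineBarrier barrier_op(CMD_PIPELINEBARRIER2, validator, queue_flags, dep_info);
//   skip |= barrier_op.Validate(cb_access_context);
//   barrier_op.Record(&cb_access_context);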

class SyncOpWaitEvents : public SyncOpBarriers {
  public:
    SyncOpWaitEvents(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, uint32_t eventCount,
                     const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                     uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                     const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                     const VkImageMemoryBarrier *pImageMemoryBarriers);

    SyncOpWaitEvents(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, uint32_t eventCount,
                     const VkEvent *pEvents, const VkDependencyInfoKHR *pDependencyInfo);
    ~SyncOpWaitEvents() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;

  protected:
    static const char *const kIgnored;
    bool DoValidate(const CommandExecutionContext &ex_context, const ResourceUsageTag base_tag) const;
    // TODO PHASE2: This is the wrong thing to use for "replay", as the event state will have moved on since the record.
    // TODO PHASE2: May need to capture by value w.r.t. "first use", or build up in the calling/enqueue context through replay.
    std::vector<std::shared_ptr<const EVENT_STATE>> events_;
    void MakeEventsList(const SyncValidator &sync_state, uint32_t event_count, const VkEvent *events);
};
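
// Rough usage pattern (illustrative): vkCmdWaitEvents(2) maps onto this op much like the pipeline barrier above, except
// that the source scopes are expected to come from each matching SyncOpSetEvent's saved first-scope state (see
// SyncEventState below) rather than directly from the stage masks passed to the wait.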

class SyncOpResetEvent : public SyncOpBase {
  public:
    SyncOpResetEvent(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, VkEvent event,
                     VkPipelineStageFlags2KHR stageMask);
    ~SyncOpResetEvent() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;

  private:
    bool DoValidate(const CommandExecutionContext &ex_context, const ResourceUsageTag base_tag) const;
    std::shared_ptr<const EVENT_STATE> event_;
    SyncExecScope exec_scope_;
};

class SyncOpSetEvent : public SyncOpBase {
  public:
    SyncOpSetEvent(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, VkEvent event,
                   VkPipelineStageFlags2KHR stageMask, const AccessContext *access_context);
    SyncOpSetEvent(CMD_TYPE cmd_type, const SyncValidator &sync_state, VkQueueFlags queue_flags, VkEvent event,
                   const VkDependencyInfoKHR &dep_info, const AccessContext *access_context);
    ~SyncOpSetEvent() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;

  private:
    bool DoValidate(const CommandExecutionContext &ex_context, const ResourceUsageTag base_tag) const;
    void DoRecord(QueueId queue_id, ResourceUsageTag recorded_tag, const std::shared_ptr<const AccessContext> &access_context,
                  SyncEventsContext *events_context) const;
    std::shared_ptr<const EVENT_STATE> event_;
    // The access context of the command buffer at the time the set-event command was recorded.
    std::shared_ptr<const AccessContext> recorded_context_;
    SyncExecScope src_exec_scope_;
    // Note that the dep info is *not* dehandled, but retained for comparison with a future WaitEvents2.
    std::shared_ptr<safe_VkDependencyInfo> dep_info_;
};
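
// The set-event op snapshots the recording command buffer's AccessContext (recorded_context_ above) so that a later
// SyncOpWaitEvents -- possibly replayed at queue-submit time -- can resolve the event's first scope against the state
// that existed when vkCmdSetEvent(2) was recorded. A minimal record-time sketch (variable names illustrative):
//
//   SyncOpSetEvent set_op(CMD_SETEVENT2, validator, queue_flags, event, dep_info, cb_access_context.GetCurrentAccessContext());
//   set_op.Validate(cb_access_context);
//   set_op.Record(&cb_access_context);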

class SyncOpBeginRenderPass : public SyncOpBase {
  public:
    SyncOpBeginRenderPass(CMD_TYPE cmd_type, const SyncValidator &sync_state, const VkRenderPassBeginInfo *pRenderPassBegin,
                          const VkSubpassBeginInfo *pSubpassBeginInfo);
    ~SyncOpBeginRenderPass() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;
    const RenderPassAccessContext *GetRenderPassAccessContext() const { return rp_context_; }

  protected:
    safe_VkRenderPassBeginInfo renderpass_begin_info_;
    safe_VkSubpassBeginInfo subpass_begin_info_;
    std::vector<std::shared_ptr<const IMAGE_VIEW_STATE>> shared_attachments_;
    std::vector<const IMAGE_VIEW_STATE *> attachments_;
    std::shared_ptr<const RENDER_PASS_STATE> rp_state_;
    const RenderPassAccessContext *rp_context_;
};

class SyncOpNextSubpass : public SyncOpBase {
  public:
    SyncOpNextSubpass(CMD_TYPE cmd_type, const SyncValidator &sync_state, const VkSubpassBeginInfo *pSubpassBeginInfo,
                      const VkSubpassEndInfo *pSubpassEndInfo);
    ~SyncOpNextSubpass() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;

  protected:
    safe_VkSubpassBeginInfo subpass_begin_info_;
    safe_VkSubpassEndInfo subpass_end_info_;
};

class SyncOpEndRenderPass : public SyncOpBase {
  public:
    SyncOpEndRenderPass(CMD_TYPE cmd_type, const SyncValidator &sync_state, const VkSubpassEndInfo *pSubpassEndInfo);
    ~SyncOpEndRenderPass() override = default;

    bool Validate(const CommandBufferAccessContext &cb_context) const override;
    ResourceUsageTag Record(CommandBufferAccessContext *cb_context) override;
    bool ReplayValidate(ResourceUsageTag recorded_tag, const CommandBufferAccessContext &recorded_context,
                        ResourceUsageTag base_tag, CommandExecutionContext &exec_context) const override;
    void ReplayRecord(CommandExecutionContext &exec_context, ResourceUsageTag tag) const override;

  protected:
    safe_VkSubpassEndInfo subpass_end_info_;
};
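
// The three render pass ops above mirror vkCmdBeginRenderPass / vkCmdNextSubpass / vkCmdEndRenderPass (and their 2/2KHR
// variants). A plausible record-time sequence, with illustrative variable names:
//
//   SyncOpBeginRenderPass begin_op(CMD_BEGINRENDERPASS, validator, &begin_info, &subpass_begin_info);
//   begin_op.Validate(cb_access_context);
//   begin_op.Record(&cb_access_context);   // presumably sets up the RenderPassAccessContext and subpass 0
//   // ... draws are validated against the current subpass context ...
//   SyncOpEndRenderPass end_op(CMD_ENDRENDERPASS, validator, &subpass_end_info);
//   end_op.Record(&cb_access_context);     // presumably resolves the subpass contexts back into the command buffer context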

class AccessContext {
  public:
    enum DetectOptions : uint32_t {
        kDetectPrevious = 1U << 0,
        kDetectAsync = 1U << 1,
        kDetectAll = (kDetectPrevious | kDetectAsync)
    };
    struct AddressRange {
        AccessAddressType type;
        ResourceAccessRange range;
        AddressRange() = default;  // The explicit constructor below isn't needed in C++20, but while present it suppresses the implicit default.
        AddressRange(AccessAddressType type_, ResourceAccessRange range_) : type(type_), range(range_) {}
    };
    using MapArray = std::array<ResourceAccessRangeMap, static_cast<size_t>(AccessAddressType::kTypeCount)>;

    using TrackBack = SubpassBarrierTrackback<AccessContext>;

    HazardResult DetectHazard(const BUFFER_STATE &buffer, SyncStageAccessIndex usage_index, const ResourceAccessRange &range) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                              bool is_depth_sliced) const;
    template <typename Detector>
    HazardResult DetectHazard(Detector &detector, const AttachmentViewGen &view_gen, AttachmentViewGen::Gen gen_type,
                              DetectOptions options) const;
    template <typename Detector>
    HazardResult DetectHazard(Detector &detector, const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
                              const VkOffset3D &offset, const VkExtent3D &extent, bool is_depth_sliced,
                              DetectOptions options) const;
    template <typename Detector>
    HazardResult DetectHazard(Detector &detector, const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
                              bool is_depth_sliced, DetectOptions options) const;
    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, bool is_depth_sliced) const;
    HazardResult DetectHazard(const AttachmentViewGen &view_gen, AttachmentViewGen::Gen gen_type,
                              SyncStageAccessIndex current_usage, SyncOrdering ordering_rule) const;

    HazardResult DetectHazard(const IMAGE_STATE &image, SyncStageAccessIndex current_usage,
                              const VkImageSubresourceRange &subresource_range, SyncOrdering ordering_rule,
                              const VkOffset3D &offset, const VkExtent3D &extent, bool is_depth_sliced) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, const VkImageSubresourceRange &subresource_range,
                                          VkPipelineStageFlags2KHR src_exec_scope, const SyncStageAccessFlags &src_access_scope,
                                          QueueId queue_id, const SyncEventState &sync_event, DetectOptions options) const;
    HazardResult DetectImageBarrierHazard(const AttachmentViewGen &attachment_view, const SyncBarrier &barrier,
                                          DetectOptions options) const;
    HazardResult DetectImageBarrierHazard(const IMAGE_STATE &image, VkPipelineStageFlags2KHR src_exec_scope,
                                          const SyncStageAccessFlags &src_access_scope,
                                          const VkImageSubresourceRange &subresource_range, DetectOptions options) const;
    HazardResult DetectImageBarrierHazard(const SyncImageMemoryBarrier &image_barrier) const;
    HazardResult DetectSubpassTransitionHazard(const TrackBack &track_back, const AttachmentViewGen &attach_view) const;

    void RecordLayoutTransitions(const RENDER_PASS_STATE &rp_state, uint32_t subpass,
                                 const AttachmentViewGenVector &attachment_views, ResourceUsageTag tag);

    HazardResult DetectFirstUseHazard(QueueId queue_id, const ResourceUsageRange &tag_range,
                                      const AccessContext &access_context) const;

    const TrackBack &GetDstExternalTrackBack() const { return dst_external_; }
    void Reset() {
        prev_.clear();
        prev_by_subpass_.clear();
        async_.clear();
        src_external_ = nullptr;
        dst_external_ = TrackBack();
        start_tag_ = ResourceUsageTag();
        for (auto &map : access_state_maps_) {
            map.clear();
        }
    }

    // Follow the previous contexts to resolve the access state, supporting "lazy" import into this context. Not intended for
    // subpass layout transitions, where the pending-state handling is more complex.
    // TODO: See if returning the lower_bound would be useful from a performance POV -- look at the lower_bound overhead.
    // Would need to add a "hint" overload to the parallel_iterator::invalidate_[AB] calls, if so.
    template <typename BarrierAction>
    void ResolvePreviousAccessStack(AccessAddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
                                    const ResourceAccessState *infill_state, const BarrierAction &previous_barrier) const;
    void ResolvePreviousAccess(AccessAddressType type, const ResourceAccessRange &range, ResourceAccessRangeMap *descent_map,
                               const ResourceAccessState *infill_state,
                               const ResourceAccessStateFunction *previous_barrier = nullptr) const;
    void ResolvePreviousAccesses();
    template <typename BarrierAction>
    void ResolveAccessRange(const AttachmentViewGen &view_gen, AttachmentViewGen::Gen gen_type, BarrierAction &barrier_action,
                            ResourceAccessRangeMap *descent_map, const ResourceAccessState *infill_state) const;
    template <typename BarrierAction>
    void ResolveAccessRange(AccessAddressType type, const ResourceAccessRange &range, BarrierAction &barrier_action,
                            ResourceAccessRangeMap *resolve_map, const ResourceAccessState *infill_state,
                            bool recur_to_infill = true) const;
    template <typename ResolveOp>
    void ResolveFromContext(ResolveOp &&resolve_op, const AccessContext &from_context,
                            const ResourceAccessState *infill_state = nullptr, bool recur_to_infill = false);

    void UpdateAccessState(const BUFFER_STATE &buffer, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
                           const ResourceAccessRange &range, ResourceUsageTag tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
                           const VkImageSubresourceRange &subresource_range, const ResourceUsageTag &tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
                           const VkImageSubresourceRange &subresource_range, const VkOffset3D &offset, const VkExtent3D &extent,
                           ResourceUsageTag tag);
    void UpdateAccessState(const AttachmentViewGen &view_gen, AttachmentViewGen::Gen gen_type, SyncStageAccessIndex current_usage,
                           SyncOrdering ordering_rule, ResourceUsageTag tag);
    void UpdateAccessState(const IMAGE_STATE &image, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
                           const VkImageSubresourceLayers &subresource, const VkOffset3D &offset, const VkExtent3D &extent,
                           ResourceUsageTag tag);
    void UpdateAttachmentResolveAccess(const RENDER_PASS_STATE &rp_state, const AttachmentViewGenVector &attachment_views,
                                       uint32_t subpass, ResourceUsageTag tag);
    void UpdateAttachmentStoreAccess(const RENDER_PASS_STATE &rp_state, const AttachmentViewGenVector &attachment_views,
                                     uint32_t subpass, ResourceUsageTag tag);

    void ResolveChildContexts(const std::vector<AccessContext> &contexts);

    void ImportAsyncContexts(const AccessContext &from);
    template <typename Action, typename RangeGen>
    void ApplyUpdateAction(AccessAddressType address_type, const Action &action, RangeGen *range_gen_arg);
    template <typename Action>
    void ApplyUpdateAction(const AttachmentViewGen &view_gen, AttachmentViewGen::Gen gen_type, const Action &action);
    template <typename Action>
    void ApplyToContext(const Action &barrier_action);
    static AccessAddressType ImageAddressType(const IMAGE_STATE &image);

    void DeleteAccess(const AddressRange &address);
    AccessContext(uint32_t subpass, VkQueueFlags queue_flags, const std::vector<SubpassDependencyGraphNode> &dependencies,
                  const std::vector<AccessContext> &contexts, const AccessContext *external_context);

    AccessContext() { Reset(); }
    AccessContext(const AccessContext &copy_from) = default;

    ResourceAccessRangeMap &GetAccessStateMap(AccessAddressType type) { return access_state_maps_[static_cast<size_t>(type)]; }
    const ResourceAccessRangeMap &GetAccessStateMap(AccessAddressType type) const {
        return access_state_maps_[static_cast<size_t>(type)];
    }
    const TrackBack *GetTrackBackFromSubpass(uint32_t subpass) const {
        if (subpass == VK_SUBPASS_EXTERNAL) {
            return src_external_;
        } else {
            assert(subpass < prev_by_subpass_.size());
            return prev_by_subpass_[subpass];
        }
    }

    bool ValidateLayoutTransitions(const CommandExecutionContext &ex_context, const RENDER_PASS_STATE &rp_state,
                                   const VkRect2D &render_area, uint32_t subpass, const AttachmentViewGenVector &attachment_views,
                                   CMD_TYPE cmd_type) const;
    bool ValidateLoadOperation(const CommandExecutionContext &ex_context, const RENDER_PASS_STATE &rp_state,
                               const VkRect2D &render_area, uint32_t subpass, const AttachmentViewGenVector &attachment_views,
                               CMD_TYPE cmd_type) const;
    bool ValidateStoreOperation(const CommandExecutionContext &ex_context, const RENDER_PASS_STATE &rp_state,
                                const VkRect2D &render_area, uint32_t subpass, const AttachmentViewGenVector &attachment_views,
                                CMD_TYPE cmd_type) const;
    bool ValidateResolveOperations(const CommandExecutionContext &ex_context, const RENDER_PASS_STATE &rp_state,
                                   const VkRect2D &render_area, const AttachmentViewGenVector &attachment_views, CMD_TYPE cmd_type,
                                   uint32_t subpass) const;

    void SetStartTag(ResourceUsageTag tag) { start_tag_ = tag; }
    template <typename Action>
    void ForAll(Action &&action);
    template <typename Action>
    void ConstForAll(Action &&action) const;
    template <typename Predicate>
    void EraseIf(Predicate &&pred);

    // Used during queue submission to build up the QueueBatchContext's AccessContext for validation; otherwise left clear.
    void AddAsyncContext(const AccessContext *context);
    // Used during queue submission to avoid stale pointers.
    void ClearAsyncContext(const AccessContext *context) { async_.clear(); }

  private:
    template <typename Detector>
    HazardResult DetectHazard(AccessAddressType type, Detector &detector, const ResourceAccessRange &range,
                              DetectOptions options) const;
    template <typename Detector>
    HazardResult DetectAsyncHazard(AccessAddressType type, const Detector &detector, const ResourceAccessRange &range) const;
    template <typename Detector>
    HazardResult DetectPreviousHazard(AccessAddressType type, Detector &detector, const ResourceAccessRange &range) const;
    void UpdateAccessState(AccessAddressType type, SyncStageAccessIndex current_usage, SyncOrdering ordering_rule,
                           const ResourceAccessRange &range, ResourceUsageTag tag);

    MapArray access_state_maps_;
    std::vector<TrackBack> prev_;
    std::vector<TrackBack *> prev_by_subpass_;
    std::vector<const AccessContext *> async_;
    TrackBack *src_external_;
    TrackBack dst_external_;
    ResourceUsageTag start_tag_;
};
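
// AccessContext is the core bookkeeping structure: per address-type range maps of resource access state, plus links to
// previous (subpass/external) and asynchronous contexts. A minimal detect-then-record sketch for a buffer access
// (illustrative only -- MakeRange, the enumerator, and the HazardResult member name are assumptions, and real call
// sites add range generation and message formatting):
//
//   const ResourceAccessRange range = MakeRange(offset, size);
//   HazardResult hazard = context.DetectHazard(buffer_state, SYNC_COPY_TRANSFER_READ, range);
//   if (hazard.hazard) { /* report READ_AFTER_WRITE, etc. */ }
//   context.UpdateAccessState(buffer_state, SYNC_COPY_TRANSFER_READ, SyncOrdering::kNonAttachment, range, tag);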

struct SyncEventState {
    enum IgnoreReason { NotIgnored = 0, ResetWaitRace, Reset2WaitRace, SetRace, MissingStageBits, SetVsWait2, MissingSetEvent };
    using EventPointer = std::shared_ptr<const EVENT_STATE>;
    using ScopeMap = ResourceAccessRangeMap;
    EventPointer event;
    CMD_TYPE last_command;              // Only Event commands are valid here.
    ResourceUsageTag last_command_tag;  // Needed to filter replay validation
    CMD_TYPE unsynchronized_set;
    VkPipelineStageFlags2KHR barriers;
    SyncExecScope scope;
    ResourceUsageTag first_scope_tag;
    bool destroyed;
    std::shared_ptr<const AccessContext> first_scope;

    SyncEventState()
        : event(),
          last_command(CMD_NONE),
          last_command_tag(0),
          unsynchronized_set(CMD_NONE),
          barriers(0U),
          scope(),
          first_scope_tag(),
          destroyed(true) {}

    SyncEventState(const SyncEventState &) = default;
    SyncEventState(SyncEventState &&) = default;

    SyncEventState(const SyncEventState::EventPointer &event_state) : SyncEventState() {
        event = event_state;
        destroyed = (event.get() == nullptr) || event_state->Destroyed();
    }

    void ResetFirstScope();
    const ScopeMap &FirstScope(AccessAddressType address_type) const { return first_scope->GetAccessStateMap(address_type); }
    IgnoreReason IsIgnoredByWait(CMD_TYPE cmd_type, VkPipelineStageFlags2KHR srcStageMask) const;
    bool HasBarrier(VkPipelineStageFlags2KHR stageMask, VkPipelineStageFlags2KHR exec_scope) const;
};
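
// The saved first_scope is what gives vkCmdWaitEvents its "happens-before" semantics in this model: accesses recorded
// before the set-event define the scope a later wait is compared against, while IsIgnoredByWait encodes the conditions
// under which a wait must be ignored -- for example a reset/wait ordering race (ResetWaitRace) or a synchronization2
// set paired with a sync1 wait (SetVsWait2).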

class SyncEventsContext {
  public:
    using Map = layer_data::unordered_map<const EVENT_STATE *, std::shared_ptr<SyncEventState>>;
    using iterator = Map::iterator;
    using const_iterator = Map::const_iterator;

    SyncEventState *GetFromShared(const SyncEventState::EventPointer &event_state) {
        const auto find_it = map_.find(event_state.get());
        if (find_it == map_.end()) {
            if (!event_state.get()) return nullptr;

            const auto *event_plain_ptr = event_state.get();
            auto sync_state = std::make_shared<SyncEventState>(event_state);
            auto insert_pair = map_.emplace(event_plain_ptr, sync_state);
            return insert_pair.first->second.get();
        }
        return find_it->second.get();
    }

    const SyncEventState *Get(const EVENT_STATE *event_state) const {
        const auto find_it = map_.find(event_state);
        if (find_it == map_.end()) {
            return nullptr;
        }
        return find_it->second.get();
    }
    const SyncEventState *Get(const SyncEventState::EventPointer &event_state) const { return Get(event_state.get()); }

    void ApplyBarrier(const SyncExecScope &src, const SyncExecScope &dst, ResourceUsageTag tag);
    void ApplyTaggedWait(VkQueueFlags queue_flags, ResourceUsageTag tag);

    // STL-style naming for range-for support
    inline iterator begin() { return map_.begin(); }
    inline const_iterator begin() const { return map_.begin(); }
    inline iterator end() { return map_.end(); }
    inline const_iterator end() const { return map_.end(); }

    void Destroy(const EVENT_STATE *event_state) {
        auto sync_it = map_.find(event_state);
        if (sync_it != map_.end()) {
            sync_it->second->destroyed = true;
            map_.erase(sync_it);
        }
    }
    void Clear() { map_.clear(); }

    SyncEventsContext &DeepCopy(const SyncEventsContext &from);

  private:
    Map map_;
};
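
// A small usage sketch (illustrative): GetFromShared lazily creates the per-context SyncEventState the first time an
// event is referenced, while the const Get overloads return null for unknown events.
//
//   SyncEventState *sync_event = events_context->GetFromShared(event_shared_ptr);
//   if (sync_event && !sync_event->HasBarrier(srcStageMask, src_exec_scope)) { /* flag an unsynchronized set/wait */ }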

class RenderPassAccessContext {
  public:
    static AttachmentViewGenVector CreateAttachmentViewGen(const VkRect2D &render_area,
                                                           const std::vector<const IMAGE_VIEW_STATE *> &attachment_views);
    RenderPassAccessContext() : rp_state_(nullptr), render_area_(VkRect2D()), current_subpass_(0) {}
    RenderPassAccessContext(const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area, VkQueueFlags queue_flags,
                            const std::vector<const IMAGE_VIEW_STATE *> &attachment_views, const AccessContext *external_context);

    bool ValidateDrawSubpassAttachment(const CommandExecutionContext &ex_context, const CMD_BUFFER_STATE &cmd_buffer,
                                       CMD_TYPE cmd_type) const;
    void RecordDrawSubpassAttachment(const CMD_BUFFER_STATE &cmd_buffer, ResourceUsageTag tag);
    bool ValidateNextSubpass(const CommandExecutionContext &ex_context, CMD_TYPE cmd_type) const;
    bool ValidateEndRenderPass(const CommandExecutionContext &ex_context, CMD_TYPE cmd_type) const;
    bool ValidateFinalSubpassLayoutTransitions(const CommandExecutionContext &ex_context, CMD_TYPE cmd_type) const;

    void RecordLayoutTransitions(ResourceUsageTag tag);
    void RecordLoadOperations(ResourceUsageTag tag);
    void RecordBeginRenderPass(ResourceUsageTag tag, ResourceUsageTag load_tag);
    void RecordNextSubpass(ResourceUsageTag store_tag, ResourceUsageTag barrier_tag, ResourceUsageTag load_tag);
    void RecordEndRenderPass(AccessContext *external_context, ResourceUsageTag store_tag, ResourceUsageTag barrier_tag);

    AccessContext &CurrentContext() { return subpass_contexts_[current_subpass_]; }
    const AccessContext &CurrentContext() const { return subpass_contexts_[current_subpass_]; }
    const std::vector<AccessContext> &GetContexts() const { return subpass_contexts_; }
    uint32_t GetCurrentSubpass() const { return current_subpass_; }
    const RENDER_PASS_STATE *GetRenderPassState() const { return rp_state_; }
    AccessContext *CreateStoreResolveProxy() const;

  private:
    const RENDER_PASS_STATE *rp_state_;
    const VkRect2D render_area_;
    uint32_t current_subpass_;
    std::vector<AccessContext> subpass_contexts_;
    AttachmentViewGenVector attachment_views_;
};
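
// RenderPassAccessContext keeps one AccessContext per subpass (subpass_contexts_), advancing current_subpass_ as the
// render pass instance is recorded. A rough flow, with illustrative variable names:
//
//   rp_context.RecordBeginRenderPass(rp_tag, load_tag);            // layout transitions and load ops for subpass 0
//   rp_context.RecordDrawSubpassAttachment(cmd_buffer, draw_tag);  // per-draw attachment accesses
//   rp_context.RecordNextSubpass(store_tag, barrier_tag, load_tag);
//   rp_context.RecordEndRenderPass(&cb_access_context, store_tag, barrier_tag);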

// CommandExecutionContext is the base class for the command buffer and queue contexts, preventing unintended leakage
// of subclass-specific state while storing enough information for message logging.
// TODO: determine where to draw the design split for tag tracking (is there anything common to queues and command buffers?)
class CommandExecutionContext {
  public:
    using AccessLog = std::vector<ResourceUsageRecord>;
    CommandExecutionContext() : sync_state_(nullptr) {}
    CommandExecutionContext(const SyncValidator *sync_validator) : sync_state_(sync_validator) {}
    virtual ~CommandExecutionContext() = default;
    virtual AccessContext *GetCurrentAccessContext() = 0;
    virtual SyncEventsContext *GetCurrentEventsContext() = 0;
    virtual const AccessContext *GetCurrentAccessContext() const = 0;
    virtual const SyncEventsContext *GetCurrentEventsContext() const = 0;
    virtual QueueId GetQueueId() const = 0;

    const SyncValidator &GetSyncState() const {
        assert(sync_state_);
        return *sync_state_;
    }

    ResourceUsageRange ImportRecordedAccessLog(const CommandBufferAccessContext &recorded_context);
    std::string FormatHazard(const HazardResult &hazard) const;

    virtual ResourceUsageTag GetTagLimit() const = 0;
    virtual VulkanTypedHandle Handle() const = 0;
    virtual std::string FormatUsage(ResourceUsageTag tag) const = 0;
    virtual void InsertRecordedAccessLogEntries(const CommandBufferAccessContext &cb_context) = 0;

    virtual HazardResult DetectFirstUseHazard(const ResourceUsageRange &tag_range) = 0;
    virtual void BeginRenderPassReplay(const SyncOpBeginRenderPass &begin_op, ResourceUsageTag tag) {
        assert("Must override if use by derived type is valid" == nullptr);
    }
    virtual void NextSubpassReplay() { assert("Must override if use by derived type is valid" == nullptr); }
    virtual void EndRenderPassReplay() { assert("Must override if use by derived type is valid" == nullptr); }

    bool ValidForSyncOps() const;

  protected:
    class ReplayGuard {
      public:
        ReplayGuard(CommandExecutionContext &exec_context, const CommandBufferAccessContext &recorded_context)
            : exec_context_(exec_context) {
            exec_context_.BeginCommandBufferReplay(recorded_context);
        }
        ~ReplayGuard() { exec_context_.EndCommandBufferReplay(); }

      private:
        CommandExecutionContext &exec_context_;
    };
    friend ReplayGuard;

    const SyncValidator *sync_state_;
    const CommandBufferAccessContext *current_replay_;

  private:
    // Only the ReplayGuard is allowed to manage the begin/end of a command buffer replay.
    void BeginCommandBufferReplay(const CommandBufferAccessContext &recorded) { current_replay_ = &recorded; }
    void EndCommandBufferReplay() { current_replay_ = nullptr; }
};
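
// ReplayGuard is used RAII-style from within a derived context while replaying a recorded command buffer (for example
// when validating an executed secondary or a submitted primary); the constructor points current_replay_ at the recorded
// context and the destructor clears it. Sketch, with illustrative surroundings:
//
//   {
//       ReplayGuard guard(*this, recorded_cb_context);   // sets current_replay_
//       // ... ReplayValidate / ReplayRecord the recorded sync ops against this execution context ...
//   }                                                    // current_replay_ cleared on scope exit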

class CommandBufferAccessContext : public CommandExecutionContext {
  public:
    using SyncOpPointer = std::shared_ptr<SyncOpBase>;
    struct SyncOpEntry {
        ResourceUsageTag tag;
        SyncOpPointer sync_op;
        SyncOpEntry(ResourceUsageTag tag_, SyncOpPointer &&sync_op_) : tag(tag_), sync_op(std::move(sync_op_)) {}
        SyncOpEntry() = default;
        SyncOpEntry(const SyncOpEntry &other) = default;
    };

    CommandBufferAccessContext(const SyncValidator *sync_validator = nullptr)
        : CommandExecutionContext(sync_validator),
          cb_state_(),
          queue_flags_(),
          destroyed_(false),
          access_log_(),
          cbs_referenced_(),
          command_number_(0),
          subcommand_number_(0),
          reset_count_(0),
          cb_access_context_(),
          current_context_(&cb_access_context_),
          events_context_(),
          render_pass_contexts_(),
          current_renderpass_context_(),
          sync_ops_() {}
    CommandBufferAccessContext(SyncValidator &sync_validator, std::shared_ptr<CMD_BUFFER_STATE> &cb_state, VkQueueFlags queue_flags)
        : CommandBufferAccessContext(&sync_validator) {
        cb_state_ = cb_state;
        queue_flags_ = queue_flags;
    }

    struct AsProxyContext {};
    CommandBufferAccessContext(const CommandBufferAccessContext &real_context, AsProxyContext dummy);

    ~CommandBufferAccessContext() override = default;
    CommandExecutionContext &GetExecutionContext() { return *this; }
    const CommandExecutionContext &GetExecutionContext() const { return *this; }

    void Reset() {
        access_log_.clear();
        cbs_referenced_.clear();
        sync_ops_.clear();
        command_number_ = 0;
        subcommand_number_ = 0;
        reset_count_++;
        cb_access_context_.Reset();
        render_pass_contexts_.clear();
        current_context_ = &cb_access_context_;
        current_renderpass_context_ = nullptr;
        events_context_.Clear();
    }
    void MarkDestroyed() { destroyed_ = true; }
    bool IsDestroyed() const { return destroyed_; }

    std::string FormatUsage(ResourceUsageTag tag) const override;
    std::string FormatUsage(const ResourceFirstAccess &access) const;  // Only command buffers have "first usage"
    AccessContext *GetCurrentAccessContext() override { return current_context_; }
    SyncEventsContext *GetCurrentEventsContext() override { return &events_context_; }
    const AccessContext *GetCurrentAccessContext() const override { return current_context_; }
    const SyncEventsContext *GetCurrentEventsContext() const override { return &events_context_; }
    QueueId GetQueueId() const override;

    RenderPassAccessContext *GetCurrentRenderPassContext() { return current_renderpass_context_; }
    const RenderPassAccessContext *GetCurrentRenderPassContext() const { return current_renderpass_context_; }
    ResourceUsageTag RecordBeginRenderPass(CMD_TYPE cmd_type, const RENDER_PASS_STATE &rp_state, const VkRect2D &render_area,
                                           const std::vector<const IMAGE_VIEW_STATE *> &attachment_views);

    bool ValidateDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, CMD_TYPE cmd_type) const;
    void RecordDispatchDrawDescriptorSet(VkPipelineBindPoint pipelineBindPoint, ResourceUsageTag tag);
    bool ValidateDrawVertex(uint32_t vertexCount, uint32_t firstVertex, CMD_TYPE cmd_type) const;
    void RecordDrawVertex(uint32_t vertexCount, uint32_t firstVertex, ResourceUsageTag tag);
    bool ValidateDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, CMD_TYPE cmd_type) const;
    void RecordDrawVertexIndex(uint32_t indexCount, uint32_t firstIndex, ResourceUsageTag tag);
    bool ValidateDrawSubpassAttachment(CMD_TYPE cmd_type) const;
    void RecordDrawSubpassAttachment(ResourceUsageTag tag);
    ResourceUsageTag RecordNextSubpass(CMD_TYPE cmd_type);
    ResourceUsageTag RecordEndRenderPass(CMD_TYPE cmd_type);
    void RecordDestroyEvent(VkEvent event);

    bool ValidateFirstUse(CommandExecutionContext &exec_context, const char *func_name, uint32_t index) const;
    void RecordExecutedCommandBuffer(const CommandBufferAccessContext &recorded_context);
    void ResolveExecutedCommandBuffer(const AccessContext &recorded_context, ResourceUsageTag offset);

    HazardResult DetectFirstUseHazard(const ResourceUsageRange &tag_range) override;

    const CMD_BUFFER_STATE *GetCommandBufferState() const { return cb_state_.get(); }
    VkQueueFlags GetQueueFlags() const { return queue_flags_; }

    ResourceUsageTag NextSubcommandTag(CMD_TYPE command, ResourceUsageRecord::SubcommandType subcommand);
    ResourceUsageTag GetTagLimit() const override { return access_log_.size(); }
    VulkanTypedHandle Handle() const override {
        if (cb_state_) {
            return cb_state_->Handle();
        }
        return VulkanTypedHandle(static_cast<VkCommandBuffer>(VK_NULL_HANDLE), kVulkanObjectTypeCommandBuffer);
    }

    ResourceUsageTag NextCommandTag(CMD_TYPE command,
| 1300 | ResourceUsageRecord::SubcommandType subcommand = ResourceUsageRecord::SubcommandType::kNone); |
| 1301 | ResourceUsageTag NextIndexedCommandTag(CMD_TYPE command, uint32_t index); |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1302 | |
John Zulauf | 3c788ef | 2022-02-22 12:12:30 -0700 | [diff] [blame] | 1303 | std::shared_ptr<const CMD_BUFFER_STATE> GetCBStateShared() const { return cb_state_; } |
| 1304 | |
John Zulauf | faea0ee | 2021-01-14 14:01:32 -0700 | [diff] [blame] | 1305 | const CMD_BUFFER_STATE &GetCBState() const { |
| 1306 | assert(cb_state_); |
| 1307 | return *(cb_state_.get()); |
| 1308 | } |
| 1309 | CMD_BUFFER_STATE &GetCBState() { |
| 1310 | assert(cb_state_); |
| 1311 | return *(cb_state_.get()); |
| 1312 | } |
John Zulauf | faea0ee | 2021-01-14 14:01:32 -0700 | [diff] [blame] | 1313 | |
John Zulauf | 1bf3052 | 2021-09-03 15:39:06 -0600 | [diff] [blame] | 1314 | template <class T, class... Args> |
| 1315 | void RecordSyncOp(Args &&...args) { |
| 1316 | // T must be derived from SyncOpBase or the compiler will flag the next line as an error. |
| 1317 | SyncOpPointer sync_op(std::make_shared<T>(std::forward<Args>(args)...)); |
John Zulauf | bb89045 | 2021-12-14 11:30:18 -0700 | [diff] [blame] | 1318 | RecordSyncOp(std::move(sync_op)); // Call the non-template version |
John Zulauf | 1bf3052 | 2021-09-03 15:39:06 -0600 | [diff] [blame] | 1319 | } |
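| | // Usage sketch (illustrative only): RecordSyncOp<T> forwards its arguments to the constructor of a |
| | // SyncOpBase-derived type and hands the resulting shared_ptr to the non-template overload below. The |
| | // argument list is elided because it depends on the derived op; cb_context stands in for a |
| | // CommandBufferAccessContext pointer obtained from the validator (an assumption for illustration): |
| | // |
| | //     cb_context->RecordSyncOp<SyncOpBeginRenderPass>(/* constructor arguments of the derived op */); |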
John Zulauf | 3c788ef | 2022-02-22 12:12:30 -0700 | [diff] [blame] | 1320 | const AccessLog &GetAccessLog() const { return access_log_; } |
| 1321 | void InsertRecordedAccessLogEntries(const CommandBufferAccessContext &cb_context) override; |
John Zulauf | 06f6f1e | 2022-04-19 15:28:11 -0600 | [diff] [blame] | 1322 | const std::vector<SyncOpEntry> &GetSyncOps() const { return sync_ops_; }; |
John Zulauf | 8eda156 | 2021-04-13 17:06:41 -0600 | [diff] [blame] | 1323 | |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1324 | private: |
John Zulauf | bb89045 | 2021-12-14 11:30:18 -0700 | [diff] [blame] | 1325 | // As this passes a shared pointer around to record, move it to avoid needless atomic reference-count operations. |
| 1326 | void RecordSyncOp(SyncOpPointer &&sync_op); |
John Zulauf | 4fa6846 | 2021-04-26 21:04:22 -0600 | [diff] [blame] | 1327 | std::shared_ptr<CMD_BUFFER_STATE> cb_state_; |
| 1328 | VkQueueFlags queue_flags_; |
| 1329 | bool destroyed_; |
| 1330 | |
John Zulauf | 3c788ef | 2022-02-22 12:12:30 -0700 | [diff] [blame] | 1331 | AccessLog access_log_; |
John Zulauf | 3c2a0b3 | 2021-07-14 11:14:52 -0600 | [diff] [blame] | 1332 | layer_data::unordered_set<std::shared_ptr<const CMD_BUFFER_STATE>> cbs_referenced_; |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 1333 | uint32_t command_number_; |
John Zulauf | faea0ee | 2021-01-14 14:01:32 -0700 | [diff] [blame] | 1334 | uint32_t subcommand_number_; |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 1335 | uint32_t reset_count_; |
John Zulauf | 4fa6846 | 2021-04-26 21:04:22 -0600 | [diff] [blame] | 1336 | |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 1337 | AccessContext cb_access_context_; |
John Zulauf | 540266b | 2020-04-06 18:54:53 -0600 | [diff] [blame] | 1338 | AccessContext *current_context_; |
John Zulauf | 669dfd5 | 2021-01-27 17:15:28 -0700 | [diff] [blame] | 1339 | SyncEventsContext events_context_; |
John Zulauf | 4fa6846 | 2021-04-26 21:04:22 -0600 | [diff] [blame] | 1340 | |
| 1341 | // Don't need the following for an active proxy cb context |
John Zulauf | ab84f24 | 2022-08-04 18:38:40 -0600 | [diff] [blame] | 1342 | std::vector<std::unique_ptr<RenderPassAccessContext>> render_pass_contexts_; |
John Zulauf | 4fa6846 | 2021-04-26 21:04:22 -0600 | [diff] [blame] | 1343 | RenderPassAccessContext *current_renderpass_context_; |
| 1344 | std::vector<SyncOpEntry> sync_ops_; |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1345 | }; |
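| | // Recording flow sketch (illustrative only): a typical draw is validated in the PreCallValidate hook and |
| | // recorded in the PreCallRecord hook; they are shown together here only for brevity. cb_context stands in |
| | // for a CommandBufferAccessContext pointer returned by SyncValidator::GetAccessContext() (an assumption |
| | // for illustration): |
| | // |
| | //     skip |= cb_context->ValidateDrawVertex(vertexCount, firstVertex, CMD_DRAW); |
| | //     const ResourceUsageTag tag = cb_context->NextCommandTag(CMD_DRAW); |
| | //     cb_context->RecordDrawVertex(vertexCount, firstVertex, tag); |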
| 1346 | |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1347 | class QueueSyncState; |
| 1348 | |
| 1349 | // Stores the ResourceUsageRecords for the global tag range. The prev_ field lets the const Validation-phase |
| 1350 | // "overlay" built from the cmd state access earlier records seamlessly (see the usage sketch after the class). |
| 1351 | class AccessLogger { |
| 1352 | public: |
| 1353 | struct BatchRecord { |
| 1354 | BatchRecord() = default; |
| 1355 | BatchRecord(const BatchRecord &other) = default; |
| 1356 | BatchRecord(BatchRecord &&other) = default; |
| 1357 | BatchRecord(const QueueSyncState *q, uint64_t submit, uint32_t batch) |
| 1358 | : queue(q), submit_index(submit), batch_index(batch) {} |
| 1359 | BatchRecord &operator=(const BatchRecord &other) = default; |
| 1360 | const QueueSyncState *queue; |
| 1361 | uint64_t submit_index; |
| 1362 | uint32_t batch_index; |
| 1363 | }; |
| 1364 | |
| 1365 | struct AccessRecord { |
| 1366 | const BatchRecord *batch; |
| 1367 | const ResourceUsageRecord *record; |
| 1368 | bool IsValid() const { return batch && record; } |
| 1369 | }; |
| 1370 | |
| 1371 | // BatchLog lookup is batch relative, thus the batch doesn't need to track its offset |
| 1372 | class BatchLog { |
| 1373 | public: |
| 1374 | BatchLog() = default; |
| 1375 | BatchLog(const BatchLog &batch) = default; |
| 1376 | BatchLog(BatchLog &&other) = default; |
| 1377 | BatchLog &operator=(const BatchLog &other) = default; |
| 1378 | BatchLog &operator=(BatchLog &&other) = default; |
| 1379 | BatchLog(const BatchRecord &batch) : batch_(batch) {} |
| 1380 | |
| 1381 | size_t Size() const { return log_.size(); } |
| 1382 | const BatchRecord &GetBatch() const { return batch_; } |
| 1383 | AccessRecord operator[](size_t index) const; |
| 1384 | |
| 1385 | void Append(const CommandExecutionContext::AccessLog &other); |
| 1386 | |
| 1387 | private: |
| 1388 | BatchRecord batch_; |
| 1389 | layer_data::unordered_set<std::shared_ptr<const CMD_BUFFER_STATE>> cbs_referenced_; |
| 1390 | CommandExecutionContext::AccessLog log_; |
| 1391 | }; |
| 1392 | |
| 1393 | using AccessLogRangeMap = sparse_container::range_map<ResourceUsageTag, BatchLog>; |
| 1394 | |
| 1395 | AccessLogger(const AccessLogger *prev = nullptr) : prev_(prev) {} |
| 1396 | // AccessLogger lookup is based on global tags |
| 1397 | AccessRecord operator[](ResourceUsageTag tag) const; |
| 1398 | BatchLog *AddBatch(const QueueSyncState *queue_state, uint64_t submit_id, uint32_t batch_id, const ResourceUsageRange &range); |
| 1399 | void MergeMove(AccessLogger &&child); |
| 1400 | void Reset(); |
| 1401 | |
| 1402 | private: |
| 1403 | const AccessLogger *prev_; |
| 1404 | AccessLogRangeMap access_log_map_; |
| 1405 | }; |
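| | // Usage sketch for AccessLogger (illustrative only; queue_state, submit_id, batch_index, tag_range, and |
| | // some_global_tag are assumptions standing in for values supplied at submit time): |
| | // |
| | //     AccessLogger overlay(&global_access_log_);   // overlay on top of the global log via prev_ |
| | //     auto *batch_log = overlay.AddBatch(queue_state, submit_id, batch_index, tag_range); |
| | //     batch_log->Append(cb_context.GetAccessLog()); |
| | //     const AccessLogger::AccessRecord found = overlay[some_global_tag];  // lookup by global tag |
| | //     if (found.IsValid()) { /* format a report from found.batch and found.record */ } |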
| 1406 | |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1407 | class QueueBatchContext : public CommandExecutionContext { |
| 1408 | public: |
John Zulauf | dab327f | 2022-07-08 12:02:05 -0600 | [diff] [blame] | 1409 | struct RenderPassReplayState { |
| 1410 | // A minimal subset of the functionality present in RenderPassAccessContext. Since the accesses are recorded in the |
| 1411 | // first_use information of the recorded access contexts, all we need to support here are the barrier/resolve operations (see the lifecycle sketch after this struct). |
| 1412 | RenderPassReplayState() { Reset(); } |
| 1413 | AccessContext *Begin(VkQueueFlags queue_flags, const SyncOpBeginRenderPass &begin_op_, |
| 1414 | const AccessContext &external_context); |
| 1415 | AccessContext *Next(); |
| 1416 | void End(AccessContext &external_context); |
| 1417 | |
| 1418 | const SyncOpBeginRenderPass *begin_op = nullptr; |
| 1419 | const AccessContext *replay_context = nullptr; |
| 1420 | uint32_t subpass = VK_SUBPASS_EXTERNAL; |
| 1421 | std::vector<AccessContext> subpass_contexts; |
| 1422 | void Reset() { |
| 1423 | begin_op = nullptr; |
| 1424 | replay_context = nullptr; |
| 1425 | subpass = VK_SUBPASS_EXTERNAL; |
| 1426 | subpass_contexts.clear(); |
| 1427 | } |
| 1428 | operator bool() const { return begin_op != nullptr; } |
| 1429 | }; |
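| | // Replay lifecycle sketch (illustrative only; queue_flags, begin_op, and external_context are assumptions |
| | // standing in for the values available during queue-submit replay): |
| | // |
| | //     RenderPassReplayState replay; |
| | //     AccessContext *subpass_ctx = replay.Begin(queue_flags, begin_op, external_context);  // first subpass |
| | //     subpass_ctx = replay.Next();                                                         // one per next-subpass op |
| | //     replay.End(external_context);                                                        // resolve back to the batch context |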
| 1430 | |
John Zulauf | 1d5f9c1 | 2022-05-13 14:51:08 -0600 | [diff] [blame] | 1431 | using ConstBatchSet = layer_data::unordered_set<std::shared_ptr<const QueueBatchContext>>; |
| 1432 | using BatchSet = layer_data::unordered_set<std::shared_ptr<QueueBatchContext>>; |
| 1433 | static constexpr bool TruePred(const std::shared_ptr<const QueueBatchContext> &) { return true; } |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1434 | struct CmdBufferEntry { |
| 1435 | uint32_t index = 0; |
| 1436 | std::shared_ptr<const CommandBufferAccessContext> cb; |
| 1437 | CmdBufferEntry(uint32_t index_, std::shared_ptr<const CommandBufferAccessContext> &&cb_) |
| 1438 | : index(index_), cb(std::move(cb_)) {} |
| 1439 | }; |
John Zulauf | 1d5f9c1 | 2022-05-13 14:51:08 -0600 | [diff] [blame] | 1440 | |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1441 | using CommandBuffers = std::vector<CmdBufferEntry>; |
| 1442 | |
John Zulauf | a8700a5 | 2022-08-18 16:22:08 -0600 | [diff] [blame] | 1443 | QueueBatchContext(const SyncValidator &sync_state, const QueueSyncState &queue_state); |
| 1444 | QueueBatchContext() = delete; |
| 1445 | |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1446 | std::string FormatUsage(ResourceUsageTag tag) const override; |
John Zulauf | dab327f | 2022-07-08 12:02:05 -0600 | [diff] [blame] | 1447 | AccessContext *GetCurrentAccessContext() override { return current_access_context_; } |
| 1448 | const AccessContext *GetCurrentAccessContext() const override { return current_access_context_; } |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1449 | SyncEventsContext *GetCurrentEventsContext() override { return &events_context_; } |
| 1450 | const SyncEventsContext *GetCurrentEventsContext() const override { return &events_context_; } |
| 1451 | const QueueSyncState *GetQueueSyncState() const { return queue_state_; } |
| 1452 | VkQueueFlags GetQueueFlags() const; |
John Zulauf | 0011952 | 2022-05-23 19:07:42 -0600 | [diff] [blame] | 1453 | QueueId GetQueueId() const override; |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1454 | |
| 1455 | void SetBatchLog(AccessLogger &logger, uint64_t submit_id, uint32_t batch_id); |
| 1456 | void ResetAccessLog() { |
| 1457 | logger_ = nullptr; |
| 1458 | batch_log_ = nullptr; |
| 1459 | } |
John Zulauf | e0757ba | 2022-06-10 16:51:45 -0600 | [diff] [blame] | 1460 | void ResetEventsContext() { events_context_.Clear(); } |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1461 | ResourceUsageTag GetTagLimit() const override { return batch_log_->Size() + tag_range_.begin; } |
| 1462 | // begin is the tag bias / .size() is the total number of records that should eventually be in access_log_ |
| 1463 | ResourceUsageRange GetTagRange() const { return tag_range_; } |
| 1464 | void InsertRecordedAccessLogEntries(const CommandBufferAccessContext &cb_context) override; |
| 1465 | |
| 1466 | void SetTagBias(ResourceUsageTag); |
John Zulauf | a8700a5 | 2022-08-18 16:22:08 -0600 | [diff] [blame] | 1467 | void SetupAccessContext(const std::shared_ptr<const QueueBatchContext> &prev, const VkSubmitInfo2 &submit_info, |
| 1468 | SignaledSemaphores &signaled_semaphores); |
| 1469 | void SetupCommandBufferInfo(const VkSubmitInfo2 &submit_info); |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1470 | |
John Zulauf | a8700a5 | 2022-08-18 16:22:08 -0600 | [diff] [blame] | 1471 | bool DoQueueSubmitValidate(const SyncValidator &sync_state, QueueSubmitCmdState &cmd_state, const VkSubmitInfo2 &submit_info); |
John Zulauf | cb7e167 | 2022-05-04 13:46:08 -0600 | [diff] [blame] | 1472 | |
John Zulauf | 1d5f9c1 | 2022-05-13 14:51:08 -0600 | [diff] [blame] | 1473 | void ResolveSubmittedCommandBuffer(const AccessContext &recorded_context, ResourceUsageTag offset); |
| 1474 | |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1475 | VulkanTypedHandle Handle() const override; |
| 1476 | |
John Zulauf | 1d5f9c1 | 2022-05-13 14:51:08 -0600 | [diff] [blame] | 1477 | void ApplyTaggedWait(QueueId queue_id, ResourceUsageTag tag); |
| 1478 | void ApplyDeviceWait(); |
| 1479 | |
John Zulauf | dab327f | 2022-07-08 12:02:05 -0600 | [diff] [blame] | 1480 | HazardResult DetectFirstUseHazard(const ResourceUsageRange &tag_range) override; |
| 1481 | void BeginRenderPassReplay(const SyncOpBeginRenderPass &begin_op, ResourceUsageTag tag) override; |
| 1482 | void NextSubpassReplay() override; |
| 1483 | void EndRenderPassReplay() override; |
| 1484 | |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1485 | private: |
John Zulauf | ecf4ac5 | 2022-06-06 10:08:42 -0600 | [diff] [blame] | 1486 | std::shared_ptr<QueueBatchContext> ResolveOneWaitSemaphore(VkSemaphore sem, VkPipelineStageFlags2 wait_mask, |
| 1487 | SignaledSemaphores &signaled); |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1488 | |
| 1489 | const QueueSyncState *queue_state_ = nullptr; |
| 1490 | ResourceUsageRange tag_range_ = ResourceUsageRange(0, 0); // Range of tags referenced by cbs_referenced |
| 1491 | |
| 1492 | AccessContext access_context_; |
John Zulauf | dab327f | 2022-07-08 12:02:05 -0600 | [diff] [blame] | 1493 | AccessContext *current_access_context_; |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1494 | SyncEventsContext events_context_; |
| 1495 | |
| 1496 | // Clear these after validation and import |
| 1497 | CommandBuffers command_buffers_; |
John Zulauf | 1d5f9c1 | 2022-05-13 14:51:08 -0600 | [diff] [blame] | 1498 | ConstBatchSet async_batches_; |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1499 | // When null, use the global logger |
| 1500 | AccessLogger *logger_ = nullptr; |
| 1501 | AccessLogger::BatchLog *batch_log_ = nullptr; |
John Zulauf | dab327f | 2022-07-08 12:02:05 -0600 | [diff] [blame] | 1502 | RenderPassReplayState rp_replay_; |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1503 | }; |
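| | // Submit-time flow sketch (illustrative only; sync_state, queue_sync_state, batch, submit_info2, and |
| | // cmd_state are assumptions standing in for the objects the validator threads through vkQueueSubmit |
| | // handling): |
| | // |
| | //     auto batch = std::make_shared<QueueBatchContext>(*sync_state, *queue_sync_state); |
| | //     batch->SetupCommandBufferInfo(submit_info2); |
| | //     batch->SetupAccessContext(queue_sync_state->LastBatch(), submit_info2, signaled_semaphores_); |
| | //     skip |= batch->DoQueueSubmitValidate(*sync_state, cmd_state, submit_info2); |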
John Zulauf | bbda457 | 2022-04-19 16:20:45 -0600 | [diff] [blame] | 1504 | |
| 1505 | class QueueSyncState { |
| 1506 | public: |
John Zulauf | 1d5f9c1 | 2022-05-13 14:51:08 -0600 | [diff] [blame] | 1507 | constexpr static QueueId kQueueIdBase = QueueId(0); |
| 1508 | constexpr static QueueId kQueueIdInvalid = ~kQueueIdBase; |
| 1509 | QueueSyncState(const std::shared_ptr<QUEUE_STATE> &queue_state, VkQueueFlags queue_flags, QueueId id) |
| 1510 | : submit_index_(0), queue_state_(queue_state), last_batch_(), queue_flags_(queue_flags), id_(id) {} |
John Zulauf | bbda457 | 2022-04-19 16:20:45 -0600 | [diff] [blame] | 1511 | |
| 1512 | VulkanTypedHandle Handle() const { |
| 1513 | if (queue_state_) { |
| 1514 | return queue_state_->Handle(); |
| 1515 | } |
| 1516 | return VulkanTypedHandle(static_cast<VkQueue>(VK_NULL_HANDLE), kVulkanObjectTypeQueue); |
| 1517 | } |
| 1518 | std::shared_ptr<const QueueBatchContext> LastBatch() const { return last_batch_; } |
John Zulauf | 1d5f9c1 | 2022-05-13 14:51:08 -0600 | [diff] [blame] | 1519 | std::shared_ptr<QueueBatchContext> LastBatch() { return last_batch_; } |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1520 | void SetLastBatch(std::shared_ptr<QueueBatchContext> &&last); |
John Zulauf | bbda457 | 2022-04-19 16:20:45 -0600 | [diff] [blame] | 1521 | QUEUE_STATE *GetQueueState() { return queue_state_.get(); } |
| 1522 | const QUEUE_STATE *GetQueueState() const { return queue_state_.get(); } |
| 1523 | VkQueueFlags GetQueueFlags() const { return queue_flags_; } |
John Zulauf | 1d5f9c1 | 2022-05-13 14:51:08 -0600 | [diff] [blame] | 1524 | QueueId GetQueueId() const { return id_; } |
John Zulauf | bbda457 | 2022-04-19 16:20:45 -0600 | [diff] [blame] | 1525 | |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1526 | uint64_t ReserveSubmitId() const; // Method is const but updates the mutable submit_index_ atomically. |
| 1527 | |
John Zulauf | bbda457 | 2022-04-19 16:20:45 -0600 | [diff] [blame] | 1528 | private: |
| 1529 | mutable std::atomic<uint64_t> submit_index_; |
| 1530 | std::shared_ptr<QUEUE_STATE> queue_state_; |
| 1531 | std::shared_ptr<QueueBatchContext> last_batch_; |
| 1532 | const VkQueueFlags queue_flags_; |
John Zulauf | 1d5f9c1 | 2022-05-13 14:51:08 -0600 | [diff] [blame] | 1533 | QueueId id_; |
John Zulauf | bbda457 | 2022-04-19 16:20:45 -0600 | [diff] [blame] | 1534 | }; |
| 1535 | |
John Zulauf | a8700a5 | 2022-08-18 16:22:08 -0600 | [diff] [blame] | 1536 | // The converter needs to be more complex than simply an array of VkSubmitInfo2 structures. |
| 1537 | // In order to convert from Info->Info2, arrays of VkSemaphoreSubmitInfo and VkCommandBufferSubmitInfo |
| 1538 | // structures must be created for the pWaitSemaphoreInfos, pCommandBufferInfos, and pSignalSemaphoreInfos |
| 1539 | // which comprise the converted VkSubmitInfo information. The created VkSubmitInfo2 structure then references the storage |
| 1540 | // of the arrays, which must have a lifespan longer than the conversion, so that the ensuing validation/record operations |
| 1541 | // can reference them. The resulting VkSubmitInfo2 is then copied into an additional vector (info2s) which takes the place |
| 1542 | // of the pSubmits parameter (see the usage sketch after the struct). |
| 1543 | struct SubmitInfoConverter { |
| 1544 | struct BatchStore { |
| 1545 | BatchStore(const VkSubmitInfo &info); |
| 1546 | |
| 1547 | static VkSemaphoreSubmitInfo WaitSemaphore(const VkSubmitInfo &info, uint32_t index); |
| 1548 | static VkCommandBufferSubmitInfo CommandBuffer(const VkSubmitInfo &info, uint32_t index); |
| 1549 | static VkSemaphoreSubmitInfo SignalSemaphore(const VkSubmitInfo &info, uint32_t index); |
| 1550 | |
| 1551 | std::vector<VkSemaphoreSubmitInfo> waits; |
| 1552 | std::vector<VkCommandBufferSubmitInfo> cbs; |
| 1553 | std::vector<VkSemaphoreSubmitInfo> signals; |
| 1554 | VkSubmitInfo2 info2; |
| 1555 | }; |
| 1556 | |
| 1557 | SubmitInfoConverter(uint32_t count, const VkSubmitInfo *infos); |
| 1558 | |
| 1559 | std::vector<BatchStore> info_store; |
| 1560 | std::vector<VkSubmitInfo2> info2s; |
| 1561 | }; |
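| | // Usage sketch (illustrative only; submitCount and pSubmits are assumptions standing in for the |
| | // vkQueueSubmit parameters being converted): |
| | // |
| | //     SubmitInfoConverter converter(submitCount, pSubmits); |
| | //     const VkSubmitInfo2 *submits2 = converter.info2s.data();  // lifetime is tied to 'converter' |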
| 1562 | |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1563 | class SyncValidator : public ValidationStateTracker, public SyncStageAccess { |
| 1564 | public: |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1565 | using StateTracker = ValidationStateTracker; |
John Zulauf | ea943c5 | 2022-02-22 11:05:17 -0700 | [diff] [blame] | 1566 | SyncValidator() { container_type = LayerObjectTypeSyncValidation; } |
John Zulauf | 888bb9d | 2022-05-20 16:13:00 -0600 | [diff] [blame] | 1567 | virtual ~SyncValidator() { ResetCommandBufferCallbacks(); }; |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1568 | |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1569 | // Global tag range for submitted command buffers' resource usage logs |
| 1570 | mutable std::atomic<ResourceUsageTag> tag_limit_{0}; // This is reserved in Validation phase, thus mutable and atomic |
| 1571 | ResourceUsageRange ReserveGlobalTagRange(size_t tag_count) const; // Note that because tag_limit_ is mutable, this const method has side effects |
| 1572 | // This is a snapshot value only |
John Zulauf | 697c0e1 | 2022-04-19 16:31:12 -0600 | [diff] [blame] | 1573 | AccessLogger global_access_log_; |
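| | // Tag-reservation sketch (illustrative only; tag_count is an assumption standing in for the number of |
| | // records in the batch being submitted): |
| | // |
| | //     const ResourceUsageRange global_range = ReserveGlobalTagRange(tag_count); |
| | //     // global_range.begin is the tag bias applied to the batch's command-buffer relative tags |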
| 1574 | |
John Zulauf | ea943c5 | 2022-02-22 11:05:17 -0700 | [diff] [blame] | 1575 | layer_data::unordered_map<VkCommandBuffer, std::shared_ptr<CommandBufferAccessContext>> cb_access_state; |
John Zulauf | e7f6a5e | 2021-01-16 14:31:18 -0700 | [diff] [blame] | 1576 | |
John Zulauf | 1d5f9c1 | 2022-05-13 14:51:08 -0600 | [diff] [blame] | 1577 | using QueueSyncStatesMap = layer_data::unordered_map<VkQueue, std::shared_ptr<QueueSyncState>>; |
John Zulauf | bbda457 | 2022-04-19 16:20:45 -0600 | [diff] [blame] | 1578 | layer_data::unordered_map<VkQueue, std::shared_ptr<QueueSyncState>> queue_sync_states_; |
John Zulauf | cb7e167 | 2022-05-04 13:46:08 -0600 | [diff] [blame] | 1579 | SignaledSemaphores signaled_semaphores_; |
John Zulauf | e7f6a5e | 2021-01-16 14:31:18 -0700 | [diff] [blame] | 1580 | |
John Zulauf | 3da08bb | 2022-08-01 17:56:56 -0600 | [diff] [blame] | 1581 | using SignaledFences = layer_data::unordered_map<VkFence, FenceSyncState>; |
| 1582 | using SignaledFence = SignaledFences::value_type; |
| 1583 | SignaledFences waitable_fences_; |
| 1584 | |
| 1585 | void ApplyTaggedWait(QueueId queue_id, ResourceUsageTag tag); |
| 1586 | |
| 1587 | void UpdateFenceWaitInfo(VkFence fence, QueueId queue_id, ResourceUsageTag tag); |
| 1588 | void WaitForFence(VkFence fence); |
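| | // Fence flow sketch (illustrative only; the call sites shown are assumptions about where these helpers |
| | // are used rather than their actual hook names): |
| | // |
| | //     UpdateFenceWaitInfo(fence, queue_id, tag);  // at submit: remember what signaling this fence implies |
| | //     WaitForFence(fence);                        // when the host observes the fence as signaled: apply the tagged wait |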
| 1589 | |
John Zulauf | bbda457 | 2022-04-19 16:20:45 -0600 | [diff] [blame] | 1590 | const QueueSyncState *GetQueueSyncState(VkQueue queue) const; |
| 1591 | QueueSyncState *GetQueueSyncState(VkQueue queue); |
| 1592 | std::shared_ptr<const QueueSyncState> GetQueueSyncStateShared(VkQueue queue) const; |
| 1593 | std::shared_ptr<QueueSyncState> GetQueueSyncStateShared(VkQueue queue); |
| 1594 | |
John Zulauf | e0757ba | 2022-06-10 16:51:45 -0600 | [diff] [blame] | 1595 | QueueBatchContext::BatchSet GetQueueBatchSnapshot(); |
| 1596 | |
John Zulauf | 1d5f9c1 | 2022-05-13 14:51:08 -0600 | [diff] [blame] | 1597 | template <typename Predicate> |
| 1598 | QueueBatchContext::ConstBatchSet GetQueueLastBatchSnapshot(Predicate &&pred) const; |
| 1599 | QueueBatchContext::ConstBatchSet GetQueueLastBatchSnapshot() const { |
| 1600 | return GetQueueLastBatchSnapshot(QueueBatchContext::TruePred); |
| 1601 | }; |
| 1602 | |
| 1603 | template <typename Predicate> |
| 1604 | QueueBatchContext::BatchSet GetQueueLastBatchSnapshot(Predicate &&pred); |
| 1605 | QueueBatchContext::BatchSet GetQueueLastBatchSnapshot() { return GetQueueLastBatchSnapshot(QueueBatchContext::TruePred); }; |
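| | // Snapshot sketch (illustrative only; 'target' is an assumed QueueId used to filter the per-queue last |
| | // batches): |
| | // |
| | //     auto batches = GetQueueLastBatchSnapshot( |
| | //         [target](const std::shared_ptr<const QueueBatchContext> &batch) { return batch->GetQueueId() == target; }); |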
John Zulauf | bbda457 | 2022-04-19 16:20:45 -0600 | [diff] [blame] | 1606 | |
| 1607 | std::shared_ptr<CommandBufferAccessContext> AccessContextFactory(VkCommandBuffer command_buffer); |
John Zulauf | ea943c5 | 2022-02-22 11:05:17 -0700 | [diff] [blame] | 1608 | CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer); |
| 1609 | CommandBufferAccessContext *GetAccessContextNoInsert(VkCommandBuffer command_buffer); |
| 1610 | const CommandBufferAccessContext *GetAccessContext(VkCommandBuffer command_buffer) const; |
| 1611 | std::shared_ptr<CommandBufferAccessContext> GetAccessContextShared(VkCommandBuffer command_buffer); |
| 1612 | std::shared_ptr<const CommandBufferAccessContext> GetAccessContextShared(VkCommandBuffer command_buffer) const; |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1613 | |
John Zulauf | d1f85d4 | 2020-04-15 12:23:15 -0600 | [diff] [blame] | 1614 | void ResetCommandBufferCallback(VkCommandBuffer command_buffer); |
| 1615 | void FreeCommandBufferCallback(VkCommandBuffer command_buffer); |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1616 | void RecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
sjfricke | 0bea06e | 2022-06-05 09:22:26 +0900 | [diff] [blame] | 1617 | const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE cmd_type); |
John Zulauf | 64ffe55 | 2021-02-06 10:25:07 -0700 | [diff] [blame] | 1618 | void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo, |
sfricke-samsung | 85584a7 | 2021-09-30 21:43:38 -0700 | [diff] [blame] | 1619 | const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE command); |
sjfricke | 0bea06e | 2022-06-05 09:22:26 +0900 | [diff] [blame] | 1620 | void RecordCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE cmd_type); |
John Zulauf | 33fc1d5 | 2020-07-17 11:01:10 -0600 | [diff] [blame] | 1621 | bool SupressedBoundDescriptorWAW(const HazardResult &hazard) const; |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1622 | |
Jeremy Gebben | 36a3b83 | 2022-03-23 10:54:18 -0600 | [diff] [blame] | 1623 | void CreateDevice(const VkDeviceCreateInfo *pCreateInfo) override; |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1624 | |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 1625 | bool ValidateBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
sjfricke | 0bea06e | 2022-06-05 09:22:26 +0900 | [diff] [blame] | 1626 | const VkSubpassBeginInfo *pSubpassBeginInfo, CMD_TYPE cmd_type) const; |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 1627 | |
| 1628 | bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1629 | VkSubpassContents contents) const override; |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 1630 | |
| 1631 | bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1632 | const VkSubpassBeginInfo *pSubpassBeginInfo) const override; |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 1633 | |
| 1634 | bool PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1635 | const VkSubpassBeginInfo *pSubpassBeginInfo) const override; |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 1636 | |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1637 | bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1638 | const VkBufferCopy *pRegions) const override; |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1639 | |
| 1640 | void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1641 | const VkBufferCopy *pRegions) override; |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1642 | |
John Zulauf | 4a6105a | 2020-11-17 15:11:05 -0700 | [diff] [blame] | 1643 | void PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) override; |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1644 | bool PreCallValidateCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) const override; |
Tony-LunarG | ef03547 | 2021-11-02 10:23:33 -0600 | [diff] [blame] | 1645 | bool PreCallValidateCmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 *pCopyBufferInfos) const override; |
| 1646 | bool ValidateCmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 *pCopyBufferInfos, CMD_TYPE cmd_type) const; |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1647 | |
Tony-LunarG | ef03547 | 2021-11-02 10:23:33 -0600 | [diff] [blame] | 1648 | void RecordCmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos, CMD_TYPE cmd_type); |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1649 | void PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfos) override; |
Tony-LunarG | ef03547 | 2021-11-02 10:23:33 -0600 | [diff] [blame] | 1650 | void PreCallRecordCmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 *pCopyBufferInfos) override; |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1651 | |
John Zulauf | 5c5e88d | 2019-12-26 11:22:02 -0700 | [diff] [blame] | 1652 | bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
| 1653 | VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1654 | const VkImageCopy *pRegions) const override; |
John Zulauf | 5c5e88d | 2019-12-26 11:22:02 -0700 | [diff] [blame] | 1655 | |
| 1656 | void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1657 | VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions) override; |
John Zulauf | 5c5e88d | 2019-12-26 11:22:02 -0700 | [diff] [blame] | 1658 | |
Tony-LunarG | b61514a | 2021-11-02 12:36:51 -0600 | [diff] [blame] | 1659 | bool ValidateCmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo, CMD_TYPE cmd_type) const; |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1660 | bool PreCallValidateCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) const override; |
Tony-LunarG | b61514a | 2021-11-02 12:36:51 -0600 | [diff] [blame] | 1661 | bool PreCallValidateCmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo) const override; |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1662 | |
Tony-LunarG | b61514a | 2021-11-02 12:36:51 -0600 | [diff] [blame] | 1663 | void RecordCmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo, CMD_TYPE cmd_type); |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1664 | void PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo) override; |
Tony-LunarG | b61514a | 2021-11-02 12:36:51 -0600 | [diff] [blame] | 1665 | void PreCallRecordCmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo) override; |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1666 | |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1667 | bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, |
| 1668 | VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, |
| 1669 | uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, |
| 1670 | uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, |
| 1671 | uint32_t imageMemoryBarrierCount, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1672 | const VkImageMemoryBarrier *pImageMemoryBarriers) const override; |
John Zulauf | 9cb530d | 2019-09-30 14:14:10 -0600 | [diff] [blame] | 1673 | |
| 1674 | void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, |
| 1675 | VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, |
| 1676 | uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, |
| 1677 | uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1678 | uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) override; |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1679 | |
Jeremy Gebben | df3fcc3 | 2021-02-15 08:53:17 -0700 | [diff] [blame] | 1680 | bool PreCallValidateCmdPipelineBarrier2KHR(VkCommandBuffer commandBuffer, |
| 1681 | const VkDependencyInfoKHR *pDependencyInfo) const override; |
Tony-LunarG | 3f6eceb | 2021-11-18 14:34:49 -0700 | [diff] [blame] | 1682 | bool PreCallValidateCmdPipelineBarrier2(VkCommandBuffer commandBuffer, const VkDependencyInfo *pDependencyInfo) const override; |
Jeremy Gebben | df3fcc3 | 2021-02-15 08:53:17 -0700 | [diff] [blame] | 1683 | void PreCallRecordCmdPipelineBarrier2KHR(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR *pDependencyInfo) override; |
Tony-LunarG | 3f6eceb | 2021-11-18 14:34:49 -0700 | [diff] [blame] | 1684 | void PreCallRecordCmdPipelineBarrier2(VkCommandBuffer commandBuffer, const VkDependencyInfo *pDependencyInfo) override; |
Jeremy Gebben | df3fcc3 | 2021-02-15 08:53:17 -0700 | [diff] [blame] | 1685 | |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1686 | void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1687 | VkResult result) override; |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1688 | |
| 1689 | void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1690 | VkSubpassContents contents) override; |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1691 | void PostCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1692 | const VkSubpassBeginInfo *pSubpassBeginInfo) override; |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1693 | void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1694 | const VkSubpassBeginInfo *pSubpassBeginInfo) override; |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1695 | |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1696 | bool ValidateCmdNextSubpass(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo, |
sjfricke | 0bea06e | 2022-06-05 09:22:26 +0900 | [diff] [blame] | 1697 | const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE cmd_type) const; |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1698 | bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const override; |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1699 | bool PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo, |
| 1700 | const VkSubpassEndInfo *pSubpassEndInfo) const override; |
| 1701 | bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo, |
| 1702 | const VkSubpassEndInfo *pSubpassEndInfo) const override; |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 1703 | |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1704 | void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) override; |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1705 | void PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1706 | const VkSubpassEndInfo *pSubpassEndInfo) override; |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1707 | void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1708 | const VkSubpassEndInfo *pSubpassEndInfo) override; |
John Zulauf | 3d84f1b | 2020-03-09 13:33:25 -0600 | [diff] [blame] | 1709 | |
sjfricke | 0bea06e | 2022-06-05 09:22:26 +0900 | [diff] [blame] | 1710 | bool ValidateCmdEndRenderPass(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo, CMD_TYPE cmd_type) const; |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1711 | bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const override; |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1712 | bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const override; |
| 1713 | bool PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const override; |
John Zulauf | 355e49b | 2020-04-24 15:11:15 -0600 | [diff] [blame] | 1714 | |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1715 | void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) override; |
| 1716 | void PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) override; |
| 1717 | void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) override; |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1718 | |
sfricke-samsung | 71f04e3 | 2022-03-16 01:21:21 -0500 | [diff] [blame] | 1719 | template <typename RegionType> |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1720 | bool ValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, |
sfricke-samsung | 71f04e3 | 2022-03-16 01:21:21 -0500 | [diff] [blame] | 1721 | VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, |
Tony Barbour | 845d29b | 2021-11-09 11:43:14 -0700 | [diff] [blame] | 1722 | CMD_TYPE cmd_type) const; |
locke-lunarg | a19c71d | 2020-03-02 18:17:04 -0700 | [diff] [blame] | 1723 | bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, |
| 1724 | VkImageLayout dstImageLayout, uint32_t regionCount, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1725 | const VkBufferImageCopy *pRegions) const override; |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1726 | bool PreCallValidateCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1727 | const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) const override; |
Tony Barbour | 845d29b | 2021-11-09 11:43:14 -0700 | [diff] [blame] | 1728 | bool PreCallValidateCmdCopyBufferToImage2(VkCommandBuffer commandBuffer, |
| 1729 | const VkCopyBufferToImageInfo2 *pCopyBufferToImageInfo) const override; |
locke-lunarg | a19c71d | 2020-03-02 18:17:04 -0700 | [diff] [blame] | 1730 | |
sfricke-samsung | 71f04e3 | 2022-03-16 01:21:21 -0500 | [diff] [blame] | 1731 | template <typename RegionType> |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1732 | void RecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, |
sfricke-samsung | 71f04e3 | 2022-03-16 01:21:21 -0500 | [diff] [blame] | 1733 | VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, |
Tony Barbour | 845d29b | 2021-11-09 11:43:14 -0700 | [diff] [blame] | 1734 | CMD_TYPE cmd_type); |
locke-lunarg | a19c71d | 2020-03-02 18:17:04 -0700 | [diff] [blame] | 1735 | void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1736 | VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions) override; |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1737 | void PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1738 | const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) override; |
Tony Barbour | 845d29b | 2021-11-09 11:43:14 -0700 | [diff] [blame] | 1739 | void PreCallRecordCmdCopyBufferToImage2(VkCommandBuffer commandBuffer, |
| 1740 | const VkCopyBufferToImageInfo2 *pCopyBufferToImageInfo) override; |
locke-lunarg | a19c71d | 2020-03-02 18:17:04 -0700 | [diff] [blame] | 1741 | |
sfricke-samsung | 71f04e3 | 2022-03-16 01:21:21 -0500 | [diff] [blame] | 1742 | template <typename RegionType> |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1743 | bool ValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
sfricke-samsung | 71f04e3 | 2022-03-16 01:21:21 -0500 | [diff] [blame] | 1744 | VkBuffer dstBuffer, uint32_t regionCount, const RegionType *pRegions, |
Tony-LunarG | af3632a | 2021-11-10 15:51:57 -0700 | [diff] [blame] | 1745 | CMD_TYPE cmd_type) const; |
locke-lunarg | a19c71d | 2020-03-02 18:17:04 -0700 | [diff] [blame] | 1746 | bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1747 | VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) const override; |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1748 | bool PreCallValidateCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1749 | const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) const override; |
Tony-LunarG | af3632a | 2021-11-10 15:51:57 -0700 | [diff] [blame] | 1750 | bool PreCallValidateCmdCopyImageToBuffer2(VkCommandBuffer commandBuffer, |
| 1751 | const VkCopyImageToBufferInfo2 *pCopyImageToBufferInfo) const override; |
locke-lunarg | a19c71d | 2020-03-02 18:17:04 -0700 | [diff] [blame] | 1752 | |
sfricke-samsung | 71f04e3 | 2022-03-16 01:21:21 -0500 | [diff] [blame] | 1753 | template <typename RegionType> |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1754 | void RecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
sfricke-samsung | 71f04e3 | 2022-03-16 01:21:21 -0500 | [diff] [blame] | 1755 | VkBuffer dstBuffer, uint32_t regionCount, const RegionType *pRegions, CMD_TYPE cmd_type); |
locke-lunarg | a19c71d | 2020-03-02 18:17:04 -0700 | [diff] [blame] | 1756 | void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1757 | VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) override; |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1758 | void PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1759 | const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) override; |
Tony-LunarG | af3632a | 2021-11-10 15:51:57 -0700 | [diff] [blame] | 1760 | void PreCallRecordCmdCopyImageToBuffer2(VkCommandBuffer commandBuffer, |
| 1761 | const VkCopyImageToBufferInfo2 *pCopyImageToBufferInfo) override; |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1762 | |
| 1763 | template <typename RegionType> |
| 1764 | bool ValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, |
| 1765 | VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter, |
sjfricke | 0bea06e | 2022-06-05 09:22:26 +0900 | [diff] [blame] | 1766 | CMD_TYPE cmd_type) const; |
locke-lunarg | a19c71d | 2020-03-02 18:17:04 -0700 | [diff] [blame] | 1767 | |
| 1768 | bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, |
| 1769 | VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1770 | const VkImageBlit *pRegions, VkFilter filter) const override; |
| 1771 | bool PreCallValidateCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) const override; |
Tony-LunarG | 542ae91 | 2021-11-04 16:06:44 -0600 | [diff] [blame] | 1772 | bool PreCallValidateCmdBlitImage2(VkCommandBuffer commandBuffer, const VkBlitImageInfo2 *pBlitImageInfo) const override; |
locke-lunarg | a19c71d | 2020-03-02 18:17:04 -0700 | [diff] [blame] | 1773 | |
Jeff Leger | 178b1e5 | 2020-10-05 12:22:23 -0400 | [diff] [blame] | 1774 | template <typename RegionType> |
| 1775 | void RecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, |
| 1776 | VkImageLayout dstImageLayout, uint32_t regionCount, const RegionType *pRegions, VkFilter filter, |
| 1777 | ResourceUsageTag tag); |
locke-lunarg | a19c71d | 2020-03-02 18:17:04 -0700 | [diff] [blame] | 1778 | void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, |
| 1779 | VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1780 | VkFilter filter) override; |
| 1781 | void PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo) override; |
Tony-LunarG | 542ae91 | 2021-11-04 16:06:44 -0600 | [diff] [blame] | 1782 | void PreCallRecordCmdBlitImage2(VkCommandBuffer commandBuffer, const VkBlitImageInfo2 *pBlitImageInfo) override; |
locke-lunarg | 36ba259 | 2020-04-03 09:42:04 -0600 | [diff] [blame] | 1783 | |
John Zulauf | faea0ee | 2021-01-14 14:01:32 -0700 | [diff] [blame] | 1784 | bool ValidateIndirectBuffer(const CommandBufferAccessContext &cb_context, const AccessContext &context, |
| 1785 | VkCommandBuffer commandBuffer, const VkDeviceSize struct_size, const VkBuffer buffer, |
| 1786 | const VkDeviceSize offset, const uint32_t drawCount, const uint32_t stride, |
sjfricke | 0bea06e | 2022-06-05 09:22:26 +0900 | [diff] [blame] | 1787 | CMD_TYPE cmd_type) const; |
John Zulauf | 1494072 | 2021-04-12 15:19:02 -0600 | [diff] [blame] | 1788 | void RecordIndirectBuffer(AccessContext &context, ResourceUsageTag tag, const VkDeviceSize struct_size, const VkBuffer buffer, |
| 1789 | const VkDeviceSize offset, const uint32_t drawCount, uint32_t stride); |
locke-lunarg | 36ba259 | 2020-04-03 09:42:04 -0600 | [diff] [blame] | 1790 | |
John Zulauf | faea0ee | 2021-01-14 14:01:32 -0700 | [diff] [blame] | 1791 | bool ValidateCountBuffer(const CommandBufferAccessContext &cb_context, const AccessContext &context, |
sjfricke | 0bea06e | 2022-06-05 09:22:26 +0900 | [diff] [blame] | 1792 | VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, CMD_TYPE cmd_type) const; |
John Zulauf | 1494072 | 2021-04-12 15:19:02 -0600 | [diff] [blame] | 1793 | void RecordCountBuffer(AccessContext &context, ResourceUsageTag tag, VkBuffer buffer, VkDeviceSize offset); |
locke-lunarg | 93d68af | 2020-05-12 17:18:03 -0600 | [diff] [blame] | 1794 | |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1795 | bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const override; |
| 1796 | void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) override; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 1797 | |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1798 | bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const override; |
| 1799 | void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) override; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 1800 | |
| 1801 | bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1802 | uint32_t firstInstance) const override; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 1803 | void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1804 | uint32_t firstInstance) override; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 1805 | |
| 1806 | bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1807 | uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const override; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 1808 | void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1809 | uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) override; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 1810 | |
| 1811 | bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1812 | uint32_t stride) const override; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 1813 | void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1814 | uint32_t stride) override; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 1815 | |
| 1816 | bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1817 | uint32_t drawCount, uint32_t stride) const override; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 1818 | void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, |
Jeremy Gebben | f892469 | 2020-10-28 16:27:14 -0600 | [diff] [blame] | 1819 | uint32_t drawCount, uint32_t stride) override; |
locke-lunarg | e1a6702 | 2020-04-29 00:15:36 -0600 | [diff] [blame] | 1820 | |
    bool ValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer,
            VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride,
            CMD_TYPE cmd_type) const;
    bool PreCallValidateCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) const override;
    void RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer,
            VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, CMD_TYPE cmd_type);
    void PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) override;
    bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) const override;
    void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) override;
    bool PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) const override;
    void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) override;

    bool ValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride, CMD_TYPE cmd_type) const;
    bool PreCallValidateCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) const override;
    void RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride, CMD_TYPE cmd_type);
    void PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) override;
    bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) const override;
    void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) override;
    bool PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) const override;
    void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
            VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
            uint32_t stride) override;
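
    // Image clears: the affected subresource ranges are treated as image writes. Illustrative
    // application-level example (not part of this header) of the kind of hazard these hooks catch --
    // clearing an image immediately after a copy writes it, with no intervening barrier, should be
    // reported as a write-after-write hazard:
    //   vkCmdCopyBufferToImage(cb, staging, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
    //   vkCmdClearColorImage(cb, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &color, 1, &range);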
    bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
            const VkClearColorValue *pColor, uint32_t rangeCount,
            const VkImageSubresourceRange *pRanges) const override;
    void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
            const VkClearColorValue *pColor, uint32_t rangeCount,
            const VkImageSubresourceRange *pRanges) override;

    bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
            const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
            const VkImageSubresourceRange *pRanges) const override;
    void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
            const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
            const VkImageSubresourceRange *pRanges) override;
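
    // vkCmdCopyQueryPoolResults is tracked as a write to the destination buffer range implied by
    // firstQuery/queryCount, stride, and the result flags; the query pool itself is not tracked as a
    // memory access here.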
    bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
            uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
            VkDeviceSize stride, VkQueryResultFlags flags) const override;
    void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
            uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
            VkQueryResultFlags flags) override;
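
    // vkCmdFillBuffer is a write to [dstOffset, dstOffset + size); a size of VK_WHOLE_SIZE is resolved
    // against the buffer's create size.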
    bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
            uint32_t data) const override;
    void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
            uint32_t data) override;
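
    // Image resolves: source regions are reads, destination regions are writes. The 2KHR/2 variants
    // below share ValidateCmdResolveImage2/RecordCmdResolveImage2, keyed by CMD_TYPE so diagnostics
    // name the promoted or extension entry point the application used.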
    bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
            VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
            const VkImageResolve *pRegions) const override;
    void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
            VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
            const VkImageResolve *pRegions) override;

    bool PreCallValidateCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
            const VkResolveImageInfo2KHR *pResolveImageInfo) const override;
    bool PreCallValidateCmdResolveImage2(VkCommandBuffer commandBuffer,
            const VkResolveImageInfo2 *pResolveImageInfo) const override;
    bool ValidateCmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2 *pResolveImageInfo,
            CMD_TYPE cmd_type) const;
    void PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR *pResolveImageInfo) override;
    void PreCallRecordCmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2 *pResolveImageInfo) override;
    void RecordCmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2 *pResolveImageInfo, CMD_TYPE cmd_type);
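
    // vkCmdUpdateBuffer writes dataSize bytes at dstOffset; pData is host memory and is not part of the
    // tracked device accesses.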
    bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
            VkDeviceSize dataSize, const void *pData) const override;
    void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
            VkDeviceSize dataSize, const void *pData) override;
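
    // The AMD buffer-marker commands are small (4 byte) writes to dstBuffer at dstOffset, synchronized
    // against the requested pipeline stage.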
    bool PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
            VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const override;
    void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
            VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) override;
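
    // Event commands (vkCmdSetEvent/ResetEvent/WaitEvents and their synchronization2 2KHR/2 forms).
    // Hazards are checked in PreCallValidate* as usual, but state is recorded in PostCallRecord* so the
    // base state tracker has already updated the event object. SyncEventState is expected to capture the
    // first-scope accesses at set time and apply them when the matching wait is recorded, e.g. the
    // canonical hazard-free pattern:
    //   vkCmdSetEvent(cb, event, VK_PIPELINE_STAGE_TRANSFER_BIT);  // after the transfer write
    //   vkCmdWaitEvents(cb, 1, &event, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
    //                   1, &memory_barrier, 0, nullptr, 0, nullptr);  // before the shader read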
    bool PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const override;
    void PostCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) override;

    bool PreCallValidateCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
            const VkDependencyInfoKHR *pDependencyInfo) const override;
    bool PreCallValidateCmdSetEvent2(VkCommandBuffer commandBuffer, VkEvent event,
            const VkDependencyInfo *pDependencyInfo) const override;
    void PostCallRecordCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
            const VkDependencyInfoKHR *pDependencyInfo) override;
    void PostCallRecordCmdSetEvent2(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo *pDependencyInfo) override;

    bool PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const override;
    void PostCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) override;

    bool PreCallValidateCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
            VkPipelineStageFlags2KHR stageMask) const override;
    bool PreCallValidateCmdResetEvent2(VkCommandBuffer commandBuffer, VkEvent event,
            VkPipelineStageFlags2 stageMask) const override;
    void PostCallRecordCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) override;
    void PostCallRecordCmdResetEvent2(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask) override;

    bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
            VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
            uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
            uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
            uint32_t imageMemoryBarrierCount,
            const VkImageMemoryBarrier *pImageMemoryBarriers) const override;
    void PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
            VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
            uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
            uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
            uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) override;
    bool PreCallValidateCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
            const VkDependencyInfoKHR *pDependencyInfos) const override;
    void PostCallRecordCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
            const VkDependencyInfoKHR *pDependencyInfos) override;
    bool PreCallValidateCmdWaitEvents2(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
            const VkDependencyInfo *pDependencyInfos) const override;
    void PostCallRecordCmdWaitEvents2(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
            const VkDependencyInfo *pDependencyInfos) override;
    bool PreCallValidateCmdWriteBufferMarker2AMD(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer,
            VkDeviceSize dstOffset, uint32_t marker) const override;
    void PreCallRecordCmdWriteBufferMarker2AMD(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer,
            VkDeviceSize dstOffset, uint32_t marker) override;
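
    // vkCmdExecuteCommands: the recorded access contexts of the secondary command buffers are validated
    // against, and then folded into, the primary command buffer's context so later commands in the
    // primary see the secondaries' accesses.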
    bool PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
            const VkCommandBuffer *pCommandBuffers) const override;
    void PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
            const VkCommandBuffer *pCommandBuffers) override;
    void PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) override;
    void PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) override;
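
    // Queue submission and host synchronization. Submit-time validation replays each batch against the
    // accesses of previously submitted batches (see QueueBatchContext) for vkQueueSubmit, vkQueueSubmit2KHR,
    // and vkQueueSubmit2, with ValidateQueueSubmit/RecordQueueSubmit as the shared helpers. Successful
    // fence waits, fence status queries, and the WaitIdle calls above let the validator retire batches the
    // host has observed to complete. For example, a batch that reads a buffer on one queue without waiting
    // on the semaphore signaled by the queue that wrote it should be reported as a hazard at submit time.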
    bool ValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 *pSubmits, VkFence fence,
            const char *func_name) const;
    bool PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
            VkFence fence) const override;
    void RecordQueueSubmit(VkQueue queue, VkFence fence, VkResult result);
    void PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence,
            VkResult result) override;
    bool PreCallValidateQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
            VkFence fence) const override;
    void PostCallRecordQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits, VkFence fence,
            VkResult result) override;
    bool PreCallValidateQueueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
            VkFence fence) const override;
    void PostCallRecordQueueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits, VkFence fence,
            VkResult result) override;
    void PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) override;
    void PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
            uint64_t timeout, VkResult result) override;
};