/* Copyright (c) 2015-2022 The Khronos Group Inc.
 * Copyright (c) 2015-2022 Valve Corporation
 * Copyright (c) 2015-2022 LunarG, Inc.
 * Copyright (C) 2015-2022 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 *         John Zulauf <jzulauf@lunarg.com>
 *         Jeremy Kniager <jeremyk@lunarg.com>
 *         Jeremy Gebben <jeremyg@lunarg.com>
 */

#include "descriptor_sets.h"
#include "cmd_buffer_state.h"

static DESCRIPTOR_POOL_STATE::TypeCountMap GetMaxTypeCounts(const VkDescriptorPoolCreateInfo *create_info) {
    DESCRIPTOR_POOL_STATE::TypeCountMap counts;
    // Collect maximums per descriptor type.
    for (uint32_t i = 0; i < create_info->poolSizeCount; ++i) {
        const auto &pool_size = create_info->pPoolSizes[i];
        uint32_t type = static_cast<uint32_t>(pool_size.type);
        // The same descriptor type can appear several times
        counts[type] += pool_size.descriptorCount;
    }
    return counts;
}
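
// Illustrative sketch (hypothetical values, not from the original sources): because the
// same type may be listed more than once, the loop above accumulates rather than
// overwrites.
//   VkDescriptorPoolSize sizes[] = {{VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 4},
//                                   {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2},
//                                   {VK_DESCRIPTOR_TYPE_SAMPLER, 1}};
//   VkDescriptorPoolCreateInfo ci = {VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO};
//   ci.maxSets = 8;
//   ci.poolSizeCount = 3;
//   ci.pPoolSizes = sizes;
//   // GetMaxTypeCounts(&ci): {UNIFORM_BUFFER: 6, SAMPLER: 1}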

DESCRIPTOR_POOL_STATE::DESCRIPTOR_POOL_STATE(ValidationStateTracker *dev, const VkDescriptorPool pool,
                                             const VkDescriptorPoolCreateInfo *pCreateInfo)
    : BASE_NODE(pool, kVulkanObjectTypeDescriptorPool),
      maxSets(pCreateInfo->maxSets),
      createInfo(pCreateInfo),
      maxDescriptorTypeCount(GetMaxTypeCounts(pCreateInfo)),
      available_sets_(pCreateInfo->maxSets),
      available_counts_(maxDescriptorTypeCount),
      dev_data_(dev) {}

void DESCRIPTOR_POOL_STATE::Allocate(const VkDescriptorSetAllocateInfo *alloc_info, const VkDescriptorSet *descriptor_sets,
                                     const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto guard = WriteLock();
    // Account for sets and individual descriptors allocated from pool
    available_sets_ -= alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        available_counts_[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], this, ds_data->layout_nodes[i],
                                                                       variable_count, dev_data_);
        sets_.emplace(descriptor_sets[i], new_ds.get());
        dev_data_->Add(std::move(new_ds));
    }
}
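
// Bookkeeping sketch (hypothetical numbers, for illustration): allocating two sets that
// together require six UNIFORM_BUFFER descriptors from the pool sketched above leaves
// available_sets_ == 6 and available_counts_[UNIFORM_BUFFER] == 0; Free() and Reset()
// below return those counts to the pool.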

void DESCRIPTOR_POOL_STATE::Free(uint32_t count, const VkDescriptorSet *descriptor_sets) {
    auto guard = WriteLock();
    // Update available descriptor sets in pool
    available_sets_ += count;

    // For each freed descriptor set, add its resources back into the pool as available and remove it from pool and device data
    for (uint32_t i = 0; i < count; ++i) {
        if (descriptor_sets[i] != VK_NULL_HANDLE) {
            auto iter = sets_.find(descriptor_sets[i]);
            assert(iter != sets_.end());
            auto *set_state = iter->second;
            const auto &layout = set_state->Layout();
            uint32_t type_index = 0, descriptor_count = 0;
            for (uint32_t j = 0; j < layout.GetBindingCount(); ++j) {
                type_index = static_cast<uint32_t>(layout.GetTypeFromIndex(j));
                descriptor_count = layout.GetDescriptorCountFromIndex(j);
                available_counts_[type_index] += descriptor_count;
            }
            dev_data_->Destroy<cvdescriptorset::DescriptorSet>(iter->first);
            sets_.erase(iter);
        }
    }
}

void DESCRIPTOR_POOL_STATE::Reset() {
    auto guard = WriteLock();
    // Destroy each set allocated from this pool and remove it from the pool's map
    for (auto entry : sets_) {
        dev_data_->Destroy<cvdescriptorset::DescriptorSet>(entry.first);
    }
    sets_.clear();
    // Reset available count for each type and available sets for this pool
    available_counts_ = maxDescriptorTypeCount;
    available_sets_ = maxSets;
}

bool DESCRIPTOR_POOL_STATE::InUse() const {
    auto guard = ReadLock();
    for (const auto &entry : sets_) {
        const auto *ds = entry.second;
        if (ds && ds->InUse()) {
            return true;
        }
    }
    return false;
}

void DESCRIPTOR_POOL_STATE::Destroy() {
    Reset();
    BASE_NODE::Destroy();
}

// ExtendedBinding collects a VkDescriptorSetLayoutBinding and any extended
// state that comes from a different array/structure so they can stay together
// while being sorted by binding number.
struct ExtendedBinding {
    ExtendedBinding(const VkDescriptorSetLayoutBinding *l, VkDescriptorBindingFlags f) : layout_binding(l), binding_flags(f) {}

    const VkDescriptorSetLayoutBinding *layout_binding;
    VkDescriptorBindingFlags binding_flags;
};

struct BindingNumCmp {
    bool operator()(const ExtendedBinding &a, const ExtendedBinding &b) const {
        return a.layout_binding->binding < b.layout_binding->binding;
    }
};

cvdescriptorset::DescriptorClass cvdescriptorset::DescriptorTypeToClass(VkDescriptorType type) {
    switch (type) {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
            return PlainSampler;
        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            return ImageSampler;
        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            return Image;
        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            return TexelBuffer;
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
            return GeneralBuffer;
        case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
            return InlineUniform;
        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:
            return AccelerationStructure;
        case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE:
            return Mutable;
        default:
            break;
    }
    return NoDescriptorClass;
}
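
// Usage sketch (illustrative, not part of the original file): the mapping collapses the
// many VkDescriptorType values into the handful of Descriptor classes instantiated in
// the DescriptorSet constructor below, e.g.
//   assert(DescriptorTypeToClass(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) == GeneralBuffer);
//   assert(DescriptorTypeToClass(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) == Image);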

using DescriptorSet = cvdescriptorset::DescriptorSet;
using DescriptorSetLayout = cvdescriptorset::DescriptorSetLayout;
using DescriptorSetLayoutDef = cvdescriptorset::DescriptorSetLayoutDef;
using DescriptorSetLayoutId = cvdescriptorset::DescriptorSetLayoutId;

// Canonical dictionary of DescriptorSetLayoutDef (without any handle/device specific information)
cvdescriptorset::DescriptorSetLayoutDict descriptor_set_layout_dict;

DescriptorSetLayoutId GetCanonicalId(const VkDescriptorSetLayoutCreateInfo *p_create_info) {
    return descriptor_set_layout_dict.look_up(DescriptorSetLayoutDef(p_create_info));
}
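
// De-duplication sketch (illustrative): two VkDescriptorSetLayoutCreateInfo structs with
// identical contents produce equal DescriptorSetLayoutDefs, so the dictionary hands back
// the same DescriptorSetLayoutId for both, and DescriptorSetLayout::IsCompatible() below
// can compare ids instead of walking two binding lists.
//   DescriptorSetLayoutId a = GetCanonicalId(&ci);
//   DescriptorSetLayoutId b = GetCanonicalId(&ci);  // same ci contents: a == b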

// Construct DescriptorSetLayoutDef instance from given create info
// Proactively reserve and resize where possible, as the reallocation was visible in profiling
cvdescriptorset::DescriptorSetLayoutDef::DescriptorSetLayoutDef(const VkDescriptorSetLayoutCreateInfo *p_create_info)
    : flags_(p_create_info->flags), binding_count_(0), descriptor_count_(0), dynamic_descriptor_count_(0) {
    const auto *flags_create_info = LvlFindInChain<VkDescriptorSetLayoutBindingFlagsCreateInfo>(p_create_info->pNext);

    binding_type_stats_ = {0, 0};
    std::set<ExtendedBinding, BindingNumCmp> sorted_bindings;
    const uint32_t input_bindings_count = p_create_info->bindingCount;
    // Sort the input bindings in binding number order, eliminating duplicates
    for (uint32_t i = 0; i < input_bindings_count; i++) {
        VkDescriptorBindingFlags flags = 0;
        if (flags_create_info && flags_create_info->bindingCount == p_create_info->bindingCount) {
            flags = flags_create_info->pBindingFlags[i];
        }
        sorted_bindings.emplace(p_create_info->pBindings + i, flags);
    }

    const auto *mutable_descriptor_type_create_info = LvlFindInChain<VkMutableDescriptorTypeCreateInfoVALVE>(p_create_info->pNext);
    if (mutable_descriptor_type_create_info) {
        mutable_types_.resize(mutable_descriptor_type_create_info->mutableDescriptorTypeListCount);
        for (uint32_t i = 0; i < mutable_descriptor_type_create_info->mutableDescriptorTypeListCount; ++i) {
            const auto &list = mutable_descriptor_type_create_info->pMutableDescriptorTypeLists[i];
            mutable_types_[i].reserve(list.descriptorTypeCount);
            for (uint32_t j = 0; j < list.descriptorTypeCount; ++j) {
                mutable_types_[i].push_back(list.pDescriptorTypes[j]);
            }
            std::sort(mutable_types_[i].begin(), mutable_types_[i].end());
        }
    }

    // Store the create info in the sorted order from above
    uint32_t index = 0;
    binding_count_ = static_cast<uint32_t>(sorted_bindings.size());
    bindings_.reserve(binding_count_);
    binding_flags_.reserve(binding_count_);
    binding_to_index_map_.reserve(binding_count_);
    for (const auto &input_binding : sorted_bindings) {
        // Add to binding and map, s.t. it is robust to invalid duplication of binding_num
        const auto binding_num = input_binding.layout_binding->binding;
        binding_to_index_map_[binding_num] = index++;
        bindings_.emplace_back(input_binding.layout_binding);
        auto &binding_info = bindings_.back();
        binding_flags_.emplace_back(input_binding.binding_flags);

        descriptor_count_ += binding_info.descriptorCount;
        if (binding_info.descriptorCount > 0) {
            non_empty_bindings_.insert(binding_num);
        }

        if (IsDynamicDescriptor(binding_info.descriptorType)) {
            dynamic_descriptor_count_ += binding_info.descriptorCount;
        }

        // Get stats depending on descriptor type for caching later
        if (IsBufferDescriptor(binding_info.descriptorType)) {
            if (IsDynamicDescriptor(binding_info.descriptorType)) {
                binding_type_stats_.dynamic_buffer_count++;
            } else {
                binding_type_stats_.non_dynamic_buffer_count++;
            }
        }
    }
    assert(bindings_.size() == binding_count_);
    assert(binding_flags_.size() == binding_count_);
    uint32_t global_index = 0;
    global_index_range_.reserve(binding_count_);
    // Vector order is finalized so build vectors of descriptors and dynamic offsets by binding index
    for (uint32_t i = 0; i < binding_count_; ++i) {
        auto final_index = global_index + bindings_[i].descriptorCount;
        global_index_range_.emplace_back(global_index, final_index);
        global_index = final_index;
    }
}
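
// Layout sketch (hypothetical bindings, for illustration): a layout declaring binding 0
// with descriptorCount 3 and binding 2 with descriptorCount 2 yields
//   binding_to_index_map_ = {0 -> 0, 2 -> 1}
//   global_index_range_   = {[0, 3), [3, 5)}   // half-open, 5 flattened slots
// Binding numbers may be sparse; indices are always dense.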

size_t cvdescriptorset::DescriptorSetLayoutDef::hash() const {
    hash_util::HashCombiner hc;
    hc << flags_;
    hc.Combine(bindings_);
    hc.Combine(binding_flags_);
    return hc.Value();
}

// Return valid index or "end" i.e. binding_count_;
// The asserts in "Get" are reduced to the set where no valid answer (like null or 0) could be given
// Common code for all binding lookups.
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetIndexFromBinding(uint32_t binding) const {
    const auto &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.cend()) return bi_itr->second;
    return GetBindingCount();
}
VkDescriptorSetLayoutBinding const *cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorSetLayoutBindingPtrFromIndex(
    const uint32_t index) const {
    if (index >= bindings_.size()) return nullptr;
    return bindings_[index].ptr();
}
// Return descriptorCount for given index, 0 if index is unavailable
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorCountFromIndex(const uint32_t index) const {
    if (index >= bindings_.size()) return 0;
    return bindings_[index].descriptorCount;
}
// For the given index, return descriptorType
VkDescriptorType cvdescriptorset::DescriptorSetLayoutDef::GetTypeFromIndex(const uint32_t index) const {
    assert(index < bindings_.size());
    if (index < bindings_.size()) return bindings_[index].descriptorType;
    return VK_DESCRIPTOR_TYPE_MAX_ENUM;
}
// For the given index, return stageFlags
VkShaderStageFlags cvdescriptorset::DescriptorSetLayoutDef::GetStageFlagsFromIndex(const uint32_t index) const {
    assert(index < bindings_.size());
    if (index < bindings_.size()) return bindings_[index].stageFlags;
    return VkShaderStageFlags(0);
}
// Return binding flags for given index, 0 if index is unavailable
VkDescriptorBindingFlags cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorBindingFlagsFromIndex(const uint32_t index) const {
    if (index >= binding_flags_.size()) return 0;
    return binding_flags_[index];
}

const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromIndex(uint32_t index) const {
    const static IndexRange k_invalid_range = {0xFFFFFFFF, 0xFFFFFFFF};
    if (index >= binding_flags_.size()) return k_invalid_range;
    return global_index_range_[index];
}

// For the given binding, return the global index range (half open)
// As start and end are often needed in pairs, get both with a single lookup.
const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromBinding(
    const uint32_t binding) const {
    uint32_t index = GetIndexFromBinding(binding);
    return GetGlobalIndexRangeFromIndex(index);
}
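
// Lookup sketch (continuing the hypothetical layout above): GetGlobalIndexRangeFromBinding(2)
// returns [3, 5). An undeclared binding maps to index binding_count_, which is out of
// range and yields the sentinel {0xFFFFFFFF, 0xFFFFFFFF} rather than asserting.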

// Move to next valid binding having a non-zero binding count
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetNextValidBinding(const uint32_t binding) const {
    auto it = non_empty_bindings_.upper_bound(binding);
    assert(it != non_empty_bindings_.cend());
    if (it != non_empty_bindings_.cend()) return *it;
    return GetMaxBinding() + 1;
}
// For given index, return ptr to ImmutableSampler array
VkSampler const *cvdescriptorset::DescriptorSetLayoutDef::GetImmutableSamplerPtrFromIndex(const uint32_t index) const {
    if (index < bindings_.size()) {
        return bindings_[index].pImmutableSamplers;
    }
    return nullptr;
}

bool cvdescriptorset::DescriptorSetLayoutDef::IsTypeMutable(const VkDescriptorType type, uint32_t binding) const {
    if (binding < mutable_types_.size()) {
        if (mutable_types_[binding].size() > 0) {
            for (const auto mutable_type : mutable_types_[binding]) {
                if (type == mutable_type) {
                    return true;
                }
            }
            return false;
        }
    }
    // If mutableDescriptorTypeListCount is zero or if the VkMutableDescriptorTypeCreateInfoVALVE structure is not included in the
    // pNext chain, the VkMutableDescriptorTypeListVALVE for each element is considered to be zero or NULL for each member.
    return false;
}
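
// Sketch (hypothetical): if binding 0 was created with a VkMutableDescriptorTypeListVALVE
// of {SAMPLED_IMAGE, STORAGE_IMAGE}, then
//   IsTypeMutable(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 0)  -> true
//   IsTypeMutable(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0) -> false
// An empty or absent list makes every query for that binding return false.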

const std::vector<std::vector<VkDescriptorType>> &cvdescriptorset::DescriptorSetLayoutDef::GetMutableTypes() const {
    return mutable_types_;
}

const std::vector<VkDescriptorType> &cvdescriptorset::DescriptorSetLayoutDef::GetMutableTypes(uint32_t binding) const {
    if (binding >= mutable_types_.size()) {
        static const std::vector<VkDescriptorType> empty = {};
        return empty;
    }
    return mutable_types_[binding];
}

bool cvdescriptorset::DescriptorSetLayoutDef::IsNextBindingConsistent(const uint32_t binding) const {
    if (!binding_to_index_map_.count(binding + 1)) return false;
    auto const &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.end()) {
        const auto &next_bi_itr = binding_to_index_map_.find(binding + 1);
        if (next_bi_itr != binding_to_index_map_.end()) {
            auto type = bindings_[bi_itr->second].descriptorType;
            auto stage_flags = bindings_[bi_itr->second].stageFlags;
            auto immut_samp = bindings_[bi_itr->second].pImmutableSamplers ? true : false;
            auto flags = binding_flags_[bi_itr->second];
            if ((type != bindings_[next_bi_itr->second].descriptorType) ||
                (stage_flags != bindings_[next_bi_itr->second].stageFlags) ||
                (immut_samp != (bindings_[next_bi_itr->second].pImmutableSamplers ? true : false)) ||
                (flags != binding_flags_[next_bi_itr->second])) {
                return false;
            }
            return true;
        }
    }
    return false;
}
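
// Consistency sketch (illustrative): two adjacent bindings that share descriptorType,
// stageFlags, binding flags, and immutable-sampler use are "consistent". This is what
// allows a vkUpdateDescriptorSets() write whose descriptorCount overruns one binding to
// roll over into the next (see DescriptorSet::PerformWriteUpdate below).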

// If our layout is compatible with rh_ds_layout, return true.
bool cvdescriptorset::DescriptorSetLayout::IsCompatible(DescriptorSetLayout const *rh_ds_layout) const {
    bool compatible = (this == rh_ds_layout) || (GetLayoutDef() == rh_ds_layout->GetLayoutDef());
    return compatible;
}

// The DescriptorSetLayout stores the per handle data for a descriptor set layout, and references the common definition for the
// handle invariant portion
cvdescriptorset::DescriptorSetLayout::DescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo *p_create_info,
                                                          const VkDescriptorSetLayout layout)
    : BASE_NODE(layout, kVulkanObjectTypeDescriptorSetLayout), layout_id_(GetCanonicalId(p_create_info)) {}

void cvdescriptorset::AllocateDescriptorSetsData::Init(uint32_t count) {
    layout_nodes.resize(count);
}

cvdescriptorset::DescriptorSet::DescriptorSet(const VkDescriptorSet set, DESCRIPTOR_POOL_STATE *pool_state,
                                              const std::shared_ptr<DescriptorSetLayout const> &layout, uint32_t variable_count,
                                              const cvdescriptorset::DescriptorSet::StateTracker *state_data)
    : BASE_NODE(set, kVulkanObjectTypeDescriptorSet),
      some_update_(false),
      pool_state_(pool_state),
      layout_(layout),
      state_data_(state_data),
      variable_count_(variable_count),
      change_count_(0) {
    // For each binding, create default descriptors of given type
    auto binding_count = layout_->GetBindingCount();
    bindings_.reserve(binding_count);
    bindings_store_.reserve(binding_count);
    auto free_binding = bindings_store_.data();
    for (uint32_t i = 0; i < binding_count; ++i) {
        auto create_info = layout_->GetDescriptorSetLayoutBindingPtrFromIndex(i);
        assert(create_info);
        uint32_t descriptor_count = create_info->descriptorCount;
        auto flags = layout_->GetDescriptorBindingFlagsFromIndex(i);
        if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT) {
            descriptor_count = variable_count;
        }
        auto type = layout_->GetTypeFromIndex(i);
        auto descriptor_class = DescriptorTypeToClass(type);
        switch (descriptor_class) {
            case PlainSampler: {
                auto binding = MakeBinding<SamplerBinding>(free_binding++, *create_info, descriptor_count, flags);
                auto immut = layout_->GetImmutableSamplerPtrFromIndex(i);
                if (immut) {
                    for (uint32_t di = 0; di < descriptor_count; ++di) {
                        auto sampler = state_data->GetConstCastShared<SAMPLER_STATE>(immut[di]);
                        if (sampler) {
                            some_update_ = true;  // Immutable samplers are updated at creation
                            binding->updated[di] = true;
                            binding->descriptors[di].SetSamplerState(std::move(sampler));
                        }
                    }
                }
                bindings_.push_back(std::move(binding));
                break;
            }
            case ImageSampler: {
                auto binding = MakeBinding<ImageSamplerBinding>(free_binding++, *create_info, descriptor_count, flags);
                auto immut = layout_->GetImmutableSamplerPtrFromIndex(i);
                if (immut) {
                    for (uint32_t di = 0; di < descriptor_count; ++di) {
                        auto sampler = state_data->GetConstCastShared<SAMPLER_STATE>(immut[di]);
                        if (sampler) {
                            some_update_ = true;  // Immutable samplers are updated at creation
                            binding->updated[di] = true;
                            binding->descriptors[di].SetSamplerState(std::move(sampler));
                        }
                    }
                }
                bindings_.push_back(std::move(binding));
                break;
            }
            // ImageDescriptors
            case Image: {
                bindings_.push_back(MakeBinding<ImageBinding>(free_binding++, *create_info, descriptor_count, flags));
                break;
            }
            case TexelBuffer: {
                bindings_.push_back(MakeBinding<TexelBinding>(free_binding++, *create_info, descriptor_count, flags));
                break;
            }
            case GeneralBuffer: {
                auto binding = MakeBinding<BufferBinding>(free_binding++, *create_info, descriptor_count, flags);
                if (IsDynamicDescriptor(type)) {
                    for (uint32_t di = 0; di < descriptor_count; ++di) {
                        dynamic_offset_idx_to_descriptor_list_.push_back({i, di});
                    }
                }
                bindings_.push_back(std::move(binding));
                break;
            }
            case InlineUniform: {
                bindings_.push_back(MakeBinding<InlineUniformBinding>(free_binding++, *create_info, descriptor_count, flags));
                break;
            }
            case AccelerationStructure: {
                bindings_.push_back(
                    MakeBinding<AccelerationStructureBinding>(free_binding++, *create_info, descriptor_count, flags));
                break;
            }
            case Mutable: {
                bindings_.push_back(MakeBinding<MutableBinding>(free_binding++, *create_info, descriptor_count, flags));
                break;
            }
            default:
                assert(0);  // Bad descriptor type specified
                break;
        }
    }
}

void cvdescriptorset::DescriptorSet::LinkChildNodes() {
    // Connect child node(s), which cannot safely be done in the constructor.
    for (auto &binding : bindings_) {
        binding->AddParent(this);
    }
}

void cvdescriptorset::DescriptorSet::Destroy() {
    for (auto &binding : bindings_) {
        binding->RemoveParent(this);
    }
    BASE_NODE::Destroy();
}
// Loop through the write updates to do for a push descriptor set, ignoring dstSet
void cvdescriptorset::DescriptorSet::PerformPushDescriptorsUpdate(ValidationStateTracker *dev_data, uint32_t write_count,
                                                                  const VkWriteDescriptorSet *p_wds) {
    assert(IsPushDescriptor());
    for (uint32_t i = 0; i < write_count; i++) {
        PerformWriteUpdate(dev_data, &p_wds[i]);
    }

    push_descriptor_set_writes.clear();
    push_descriptor_set_writes.reserve(static_cast<std::size_t>(write_count));
    for (uint32_t i = 0; i < write_count; i++) {
        push_descriptor_set_writes.push_back(safe_VkWriteDescriptorSet(&p_wds[i]));
    }
}

// Perform write update in given update struct
void cvdescriptorset::DescriptorSet::PerformWriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *update) {
    // Perform update on a per-binding basis as consecutive updates roll over to next binding
    auto descriptors_remaining = update->descriptorCount;
    auto iter = FindDescriptor(update->dstBinding, update->dstArrayElement);
    assert(!iter.AtEnd());
    auto &orig_binding = iter.CurrentBinding();

    // Verify next consecutive binding matches type, stage flags & immutable sampler use and if AtEnd
    for (uint32_t i = 0; i < descriptors_remaining; ++i, ++iter) {
        if (iter.AtEnd() || !orig_binding.IsConsistent(iter.CurrentBinding())) {
            break;
        }
        iter->WriteUpdate(this, state_data_, update, i, iter.CurrentBinding().IsBindless());
        iter.updated(true);
    }
    if (update->descriptorCount) {
        some_update_ = true;
        change_count_++;
    }

    if (!IsPushDescriptor() && !(orig_binding.binding_flags & (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
                                                               VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) {
        Invalidate(false);
    }
}
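
// Rollover sketch (hypothetical numbers): with binding 0 holding 4 descriptors and a
// consistent binding 1 after it, a write of dstBinding = 0, dstArrayElement = 2,
// descriptorCount = 4 updates elements 2..3 of binding 0, then elements 0..1 of
// binding 1. The loop stops early if the next binding is inconsistent or the iterator
// reaches the end of the set.
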
// Perform Copy update
void cvdescriptorset::DescriptorSet::PerformCopyUpdate(ValidationStateTracker *dev_data, const VkCopyDescriptorSet *update,
                                                       const DescriptorSet *src_set) {
    auto src_iter = src_set->FindDescriptor(update->srcBinding, update->srcArrayElement);
    auto dst_iter = FindDescriptor(update->dstBinding, update->dstArrayElement);
    // Update parameters all look good so perform update
    for (uint32_t i = 0; i < update->descriptorCount; ++i, ++src_iter, ++dst_iter) {
        auto &src = *src_iter;
        auto &dst = *dst_iter;
        if (src_iter.updated()) {
            dst.CopyUpdate(this, state_data_, &src, src_iter.CurrentBinding().IsBindless());
            some_update_ = true;
            change_count_++;
            dst_iter.updated(true);
        } else {
            dst_iter.updated(false);
        }
        dst.SetDescriptorType(&src);
    }

    if (!(layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) &
          (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) {
        Invalidate(false);
    }
}

// Update the drawing state for the affected descriptors.
// Set cb_node to this set and this set to cb_node.
// Add the bindings of the descriptor
// Set the layout based on the current descriptor layout (will mask subsequent layout mismatch errors)
// TODO: Modify the UpdateDrawState virtual functions to *only* set initial layout and not change layouts
// Prereq: This should be called for a set that has been confirmed to be active for the given cb_node, meaning it's going
// to be used in a draw by the given cb_node
void cvdescriptorset::DescriptorSet::UpdateDrawState(ValidationStateTracker *device_data, CMD_BUFFER_STATE *cb_node,
                                                     CMD_TYPE cmd_type, const PIPELINE_STATE *pipe,
                                                     const BindingReqMap &binding_req_map) {
    // Descriptor UpdateDrawState only calls image layout validation callbacks. If it is disabled, skip the entire loop.
    if (device_data->disabled[image_layout_validation]) {
        return;
    }

    // For the active slots, use set# to look up descriptorSet from boundDescriptorSets, and bind all of that descriptor set's
    // resources
    CMD_BUFFER_STATE::CmdDrawDispatchInfo cmd_info = {};
    for (const auto &binding_req_pair : binding_req_map) {
        auto binding = GetBinding(binding_req_pair.first);
        assert(binding);

        // We aren't validating descriptors created with PARTIALLY_BOUND or UPDATE_AFTER_BIND, so don't record state
        if (binding->IsBindless()) {
            if (!(binding->binding_flags & VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT)) {
                cmd_info.binding_infos.emplace_back(binding_req_pair);
            }
            continue;
        }
        switch (binding->descriptor_class) {
            case Image: {
                auto *image_binding = static_cast<ImageBinding *>(binding);
                for (uint32_t i = 0; i < image_binding->count; ++i) {
                    image_binding->descriptors[i].UpdateDrawState(device_data, cb_node);
                }
                break;
            }
            case ImageSampler: {
                auto *image_binding = static_cast<ImageSamplerBinding *>(binding);
                for (uint32_t i = 0; i < image_binding->count; ++i) {
                    image_binding->descriptors[i].UpdateDrawState(device_data, cb_node);
                }
                break;
            }
            case Mutable: {
                auto *mutable_binding = static_cast<MutableBinding *>(binding);
                for (uint32_t i = 0; i < mutable_binding->count; ++i) {
                    mutable_binding->descriptors[i].UpdateDrawState(device_data, cb_node);
                }
                break;
            }
            default:
                break;
        }
    }

    if (cmd_info.binding_infos.size() > 0) {
        cmd_info.cmd_type = cmd_type;
        if (cb_node->activeFramebuffer) {
            cmd_info.framebuffer = cb_node->activeFramebuffer->framebuffer();
            cmd_info.attachments = cb_node->active_attachments;
            cmd_info.subpasses = cb_node->active_subpasses;
        }
        cb_node->validate_descriptorsets_in_queuesubmit[GetSet()].emplace_back(cmd_info);
    }
}

void cvdescriptorset::DescriptorSet::FilterOneBindingReq(const BindingReqMap::value_type &binding_req_pair, BindingReqMap *out_req,
                                                         const TrackedBindings &bindings, uint32_t limit) {
    if (bindings.size() < limit) {
        const auto it = bindings.find(binding_req_pair.first);
        if (it == bindings.cend()) out_req->emplace(binding_req_pair);
    }
}

void cvdescriptorset::DescriptorSet::FilterBindingReqs(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline,
                                                       const BindingReqMap &in_req, BindingReqMap *out_req) const {
    // For const cleanliness we have to find in the maps...
    const auto validated_it = cb_state.descriptorset_cache.find(this);
    if (validated_it == cb_state.descriptorset_cache.end()) {
        // We have nothing validated, copy in to out
        for (const auto &binding_req_pair : in_req) {
            out_req->emplace(binding_req_pair);
        }
        return;
    }
    const auto &validated = validated_it->second;

    const auto image_sample_version_it = validated.image_samplers.find(&pipeline);
    const VersionedBindings *image_sample_version = nullptr;
    if (image_sample_version_it != validated.image_samplers.cend()) {
        image_sample_version = &(image_sample_version_it->second);
    }
    const auto &dynamic_buffers = validated.dynamic_buffers;
    const auto &non_dynamic_buffers = validated.non_dynamic_buffers;
    const auto &stats = layout_->GetBindingTypeStats();
    for (const auto &binding_req_pair : in_req) {
        auto binding = binding_req_pair.first;
        VkDescriptorSetLayoutBinding const *layout_binding = layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding);
        if (!layout_binding) {
            continue;
        }
        // Caching criteria differs per type.
        // If image_layout has changed, the image descriptors need to be validated against it.
        if (IsBufferDescriptor(layout_binding->descriptorType)) {
            if (IsDynamicDescriptor(layout_binding->descriptorType)) {
                FilterOneBindingReq(binding_req_pair, out_req, dynamic_buffers, stats.dynamic_buffer_count);
            } else {
                FilterOneBindingReq(binding_req_pair, out_req, non_dynamic_buffers, stats.non_dynamic_buffer_count);
            }
        } else {
            // This is rather crude, as the changed layouts may not impact the bound descriptors,
            // but the simple "versioning" is a simple "dirt" test.
            bool stale = true;
            if (image_sample_version) {
                const auto version_it = image_sample_version->find(binding);
                if (version_it != image_sample_version->cend() && (version_it->second == cb_state.image_layout_change_count)) {
                    stale = false;
                }
            }
            if (stale) {
                out_req->emplace(binding_req_pair);
            }
        }
    }
}
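
// Caching sketch (illustrative): bindings already validated in this command buffer are
// filtered out of out_req. Buffer bindings are skipped once recorded in the appropriate
// tracked set; image/sampler bindings are skipped only while their recorded version still
// equals cb_state.image_layout_change_count, so any layout change re-dirties them.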

void cvdescriptorset::DescriptorSet::UpdateValidationCache(CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline,
                                                           const BindingReqMap &updated_bindings) {
    auto &validated = cb_state.descriptorset_cache[this];

    auto &image_sample_version = validated.image_samplers[&pipeline];
    auto &dynamic_buffers = validated.dynamic_buffers;
    auto &non_dynamic_buffers = validated.non_dynamic_buffers;
    for (const auto &binding_req_pair : updated_bindings) {
        auto binding = binding_req_pair.first;
        VkDescriptorSetLayoutBinding const *layout_binding = layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding);
        if (!layout_binding) {
            continue;
        }
        // Caching criteria differs per type.
        if (IsBufferDescriptor(layout_binding->descriptorType)) {
            if (IsDynamicDescriptor(layout_binding->descriptorType)) {
                dynamic_buffers.emplace(binding);
            } else {
                non_dynamic_buffers.emplace(binding);
            }
        } else {
            // Save the layout change version...
            image_sample_version[binding] = cb_state.image_layout_change_count;
        }
    }
}

cvdescriptorset::SamplerDescriptor::SamplerDescriptor(const VkDescriptorType) : Descriptor(PlainSampler), immutable_(false) {}

// Helper template to change shared pointer members of a Descriptor, while
// correctly managing links to the parent DescriptorSet.
// src and dst are shared pointers.
template <typename T>
static void ReplaceStatePtr(DescriptorSet *set_state, T &dst, const T &src, bool is_bindless) {
    if (dst && !is_bindless) {
        dst->RemoveParent(set_state);
    }
    dst = src;
    // For descriptor bindings with UPDATE_AFTER_BIND or PARTIALLY_BOUND only set the object as a child, but not the descriptor
    // as a parent, so that destroying the object won't invalidate the descriptor
    if (dst && !is_bindless) {
        dst->AddParent(set_state);
    }
}
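
// Usage sketch (illustrative): every Write/Copy update below funnels through this helper,
// e.g. ReplaceStatePtr(set_state, sampler_state_, new_sampler_state, is_bindless);
// For non-bindless bindings the parent link means destroying the sampler invalidates the
// set; for bindless bindings the link is skipped, so destroying the object leaves the
// descriptor set usable.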

void cvdescriptorset::SamplerDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                     const VkWriteDescriptorSet *update, const uint32_t index, bool is_bindless) {
    if (!immutable_) {
        ReplaceStatePtr(set_state, sampler_state_, dev_data->GetConstCastShared<SAMPLER_STATE>(update->pImageInfo[index].sampler),
                        is_bindless);
    }
    SetDescriptorType(update->descriptorType, 0);
}

void cvdescriptorset::SamplerDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                    const Descriptor *src, bool is_bindless) {
    if (src->descriptor_class == Mutable) {
        auto *sampler_src = static_cast<const MutableDescriptor *>(src);
        if (!immutable_) {
            ReplaceStatePtr(set_state, sampler_state_, sampler_src->GetSharedSamplerState(), is_bindless);
        }
        return;
    }
    auto *sampler_src = static_cast<const SamplerDescriptor *>(src);
    if (!immutable_) {
        ReplaceStatePtr(set_state, sampler_state_, sampler_src->sampler_state_, is_bindless);
    }
}

cvdescriptorset::ImageSamplerDescriptor::ImageSamplerDescriptor(const VkDescriptorType)
    : ImageDescriptor(ImageSampler), immutable_(false) {}

void cvdescriptorset::ImageSamplerDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                          const VkWriteDescriptorSet *update, const uint32_t index,
                                                          bool is_bindless) {
    const auto &image_info = update->pImageInfo[index];
    if (!immutable_) {
        ReplaceStatePtr(set_state, sampler_state_, dev_data->GetConstCastShared<SAMPLER_STATE>(image_info.sampler), is_bindless);
    }
    image_layout_ = image_info.imageLayout;
    ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView),
                    is_bindless);
    SetDescriptorType(update->descriptorType, 0);
}

void cvdescriptorset::ImageSamplerDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                         const Descriptor *src, bool is_bindless) {
    if (src->descriptor_class == Mutable) {
        auto *image_src = static_cast<const MutableDescriptor *>(src);
        if (!immutable_) {
            ReplaceStatePtr(set_state, sampler_state_, image_src->GetSharedSamplerState(), is_bindless);
        }
        ImageDescriptor::CopyUpdate(set_state, dev_data, src, is_bindless);
        return;
    }
    auto *image_src = static_cast<const ImageSamplerDescriptor *>(src);
    if (!immutable_) {
        ReplaceStatePtr(set_state, sampler_state_, image_src->sampler_state_, is_bindless);
    }
    ImageDescriptor::CopyUpdate(set_state, dev_data, src, is_bindless);
}

cvdescriptorset::ImageDescriptor::ImageDescriptor(const VkDescriptorType type)
    : Descriptor(Image), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {}

cvdescriptorset::ImageDescriptor::ImageDescriptor(DescriptorClass class_)
    : Descriptor(class_), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {}

void cvdescriptorset::ImageDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                   const VkWriteDescriptorSet *update, const uint32_t index, bool is_bindless) {
    const auto &image_info = update->pImageInfo[index];
    image_layout_ = image_info.imageLayout;
    ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView),
                    is_bindless);
    SetDescriptorType(update->descriptorType, 0);
}

void cvdescriptorset::ImageDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                  const Descriptor *src, bool is_bindless) {
    if (src->descriptor_class == Mutable) {
        auto *image_src = static_cast<const MutableDescriptor *>(src);

        image_layout_ = image_src->GetImageLayout();
        ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState(), is_bindless);
        return;
    }
    auto *image_src = static_cast<const ImageDescriptor *>(src);

    image_layout_ = image_src->image_layout_;
    ReplaceStatePtr(set_state, image_view_state_, image_src->image_view_state_, is_bindless);
}

void cvdescriptorset::ImageDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
    // Add binding for image
    auto iv_state = GetImageViewState();
    if (iv_state) {
        dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *iv_state, image_layout_);
    }
}

cvdescriptorset::BufferDescriptor::BufferDescriptor(const VkDescriptorType type)
    : Descriptor(GeneralBuffer), offset_(0), range_(0) {}

void cvdescriptorset::BufferDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                    const VkWriteDescriptorSet *update, const uint32_t index, bool is_bindless) {
    const auto &buffer_info = update->pBufferInfo[index];
    offset_ = buffer_info.offset;
    range_ = buffer_info.range;
    auto buffer_state = dev_data->GetConstCastShared<BUFFER_STATE>(buffer_info.buffer);
    ReplaceStatePtr(set_state, buffer_state_, buffer_state, is_bindless);
    SetDescriptorType(update->descriptorType, buffer_state ? buffer_state->createInfo.size : 0);
}

void cvdescriptorset::BufferDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                   const Descriptor *src, bool is_bindless) {
    if (src->descriptor_class == Mutable) {
        const auto buff_desc = static_cast<const MutableDescriptor *>(src);
        offset_ = buff_desc->GetOffset();
        range_ = buff_desc->GetRange();
        ReplaceStatePtr(set_state, buffer_state_, buff_desc->GetSharedBufferState(), is_bindless);
        return;
    }
    const auto buff_desc = static_cast<const BufferDescriptor *>(src);
    offset_ = buff_desc->offset_;
    range_ = buff_desc->range_;
    ReplaceStatePtr(set_state, buffer_state_, buff_desc->buffer_state_, is_bindless);
}

cvdescriptorset::TexelDescriptor::TexelDescriptor(const VkDescriptorType type) : Descriptor(TexelBuffer) {}

void cvdescriptorset::TexelDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                   const VkWriteDescriptorSet *update, const uint32_t index, bool is_bindless) {
    auto buffer_view = dev_data->GetConstCastShared<BUFFER_VIEW_STATE>(update->pTexelBufferView[index]);
    ReplaceStatePtr(set_state, buffer_view_state_, buffer_view, is_bindless);
    SetDescriptorType(update->descriptorType, buffer_view ? buffer_view->buffer_state->createInfo.size : 0);
}

void cvdescriptorset::TexelDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                  const Descriptor *src, bool is_bindless) {
    if (src->descriptor_class == Mutable) {
        ReplaceStatePtr(set_state, buffer_view_state_, static_cast<const MutableDescriptor *>(src)->GetSharedBufferViewState(),
                        is_bindless);
        return;
    }
    ReplaceStatePtr(set_state, buffer_view_state_, static_cast<const TexelDescriptor *>(src)->buffer_view_state_, is_bindless);
}
889
Jeff Bolz95176d02020-04-01 00:36:16 -0500890cvdescriptorset::AccelerationStructureDescriptor::AccelerationStructureDescriptor(const VkDescriptorType type)
Jeremy Gebben059ab502021-04-26 11:25:02 -0600891 : Descriptor(AccelerationStructure), acc_(VK_NULL_HANDLE), acc_nv_(VK_NULL_HANDLE) {
sourav parmarcd5fb182020-07-17 12:58:44 -0700892 is_khr_ = false;
Jeff Bolz95176d02020-04-01 00:36:16 -0500893}
zigada4b1512021-11-28 15:53:06 +0100894void cvdescriptorset::AccelerationStructureDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
Jeremy Gebben1b9fdb82022-06-15 15:31:32 -0600895 const VkWriteDescriptorSet *update, const uint32_t index,
896 bool is_bindless) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700897 const auto *acc_info = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureKHR>(update->pNext);
898 const auto *acc_info_nv = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureNV>(update->pNext);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700899 assert(acc_info || acc_info_nv);
900 is_khr_ = (acc_info != NULL);
sourav parmarcd5fb182020-07-17 12:58:44 -0700901 if (is_khr_) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700902 acc_ = acc_info->pAccelerationStructures[index];
Jeremy Gebben1b9fdb82022-06-15 15:31:32 -0600903 ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_), is_bindless);
sourav parmarcd5fb182020-07-17 12:58:44 -0700904 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700905 acc_nv_ = acc_info_nv->pAccelerationStructures[index];
Jeremy Gebben1b9fdb82022-06-15 15:31:32 -0600906 ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_), is_bindless);
sourav parmarcd5fb182020-07-17 12:58:44 -0700907 }
Jeremy Gebben080210f2022-05-05 13:37:08 -0600908 SetDescriptorType(update->descriptorType, 0);
Jeff Bolz95176d02020-04-01 00:36:16 -0500909}
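
// Illustrative sketch (not part of the layer): acceleration structure descriptors carry their
// handles in a pNext-chained struct rather than in pImageInfo/pBufferInfo, which is why
// WriteUpdate above searches update->pNext. The handles `tlas`, `set`, and `device` are
// hypothetical.
//
//     VkWriteDescriptorSetAccelerationStructureKHR acc_write{
//         VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR};
//     acc_write.accelerationStructureCount = 1;
//     acc_write.pAccelerationStructures = &tlas;
//     VkWriteDescriptorSet write{VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET};
//     write.pNext = &acc_write;
//     write.dstSet = set;
//     write.dstBinding = 0;
//     write.descriptorCount = 1;
//     write.descriptorType = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR;
//     vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);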

void cvdescriptorset::AccelerationStructureDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                                  const Descriptor *src, bool is_bindless) {
    if (src->descriptor_class == Mutable) {
        auto acc_desc = static_cast<const MutableDescriptor *>(src);
        if (is_khr_) {
            acc_ = acc_desc->GetAccelerationStructure();
            ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_),
                            is_bindless);
        } else {
            acc_nv_ = acc_desc->GetAccelerationStructureNV();
            ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_),
                            is_bindless);
        }
        return;
    }
    auto acc_desc = static_cast<const AccelerationStructureDescriptor *>(src);
    if (is_khr_) {
        acc_ = acc_desc->acc_;
        ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_), is_bindless);
    } else {
        acc_nv_ = acc_desc->acc_nv_;
        ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_), is_bindless);
    }
}

cvdescriptorset::MutableDescriptor::MutableDescriptor(const VkDescriptorType)
    : Descriptor(Mutable),
      buffer_size_(0),
      immutable_(false),
      image_layout_(VK_IMAGE_LAYOUT_UNDEFINED),
      offset_(0),
      range_(0),
      is_khr_(false),
      acc_(VK_NULL_HANDLE),
      acc_nv_(VK_NULL_HANDLE) {
    active_descriptor_class_ = NoDescriptorClass;
}
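
// Illustrative sketch (not part of the layer): a MutableDescriptor models a binding created with
// VK_DESCRIPTOR_TYPE_MUTABLE_EXT from VK_EXT_mutable_descriptor_type (originally a VALVE
// extension). The set layout lists the concrete types each such binding may assume; the
// Write/Copy updates below then switch the active class at update time. A minimal layout-side
// setup, with all surrounding handles assumed:
//
//     const VkDescriptorType kTypes[] = {VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
//                                        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER};
//     VkMutableDescriptorTypeListEXT type_list{2, kTypes};
//     VkMutableDescriptorTypeCreateInfoEXT mutable_info{
//         VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT};
//     mutable_info.mutableDescriptorTypeListCount = 1;
//     mutable_info.pMutableDescriptorTypeLists = &type_list;
//     // Chain mutable_info into VkDescriptorSetLayoutCreateInfo::pNext for a binding whose
//     // descriptorType is VK_DESCRIPTOR_TYPE_MUTABLE_EXT.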

void cvdescriptorset::MutableDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                     const VkWriteDescriptorSet *update, const uint32_t index, bool is_bindless) {
    VkDeviceSize buffer_size = 0;
    switch (DescriptorTypeToClass(update->descriptorType)) {
        case DescriptorClass::PlainSampler:
            if (!immutable_) {
                ReplaceStatePtr(set_state, sampler_state_,
                                dev_data->GetConstCastShared<SAMPLER_STATE>(update->pImageInfo[index].sampler), is_bindless);
            }
            break;
        case DescriptorClass::ImageSampler: {
            const auto &image_info = update->pImageInfo[index];
            if (!immutable_) {
                ReplaceStatePtr(set_state, sampler_state_, dev_data->GetConstCastShared<SAMPLER_STATE>(image_info.sampler),
                                is_bindless);
            }
            image_layout_ = image_info.imageLayout;
            ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView),
                            is_bindless);
            break;
        }
        case DescriptorClass::Image: {
            const auto &image_info = update->pImageInfo[index];
            image_layout_ = image_info.imageLayout;
            ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView),
                            is_bindless);
            break;
        }
        case DescriptorClass::GeneralBuffer: {
            const auto &buffer_info = update->pBufferInfo[index];
            offset_ = buffer_info.offset;
            range_ = buffer_info.range;
            // Use the element selected by `index`, not pBufferInfo->buffer (which is element 0)
            const auto buffer_state = dev_data->GetConstCastShared<BUFFER_STATE>(buffer_info.buffer);
            if (buffer_state) {
                buffer_size = buffer_state->createInfo.size;
            }
            ReplaceStatePtr(set_state, buffer_state_, buffer_state, is_bindless);
            break;
        }
        case DescriptorClass::TexelBuffer: {
            const auto buffer_view = dev_data->GetConstCastShared<BUFFER_VIEW_STATE>(update->pTexelBufferView[index]);
            if (buffer_view) {
                buffer_size = buffer_view->buffer_state->createInfo.size;
            }
            ReplaceStatePtr(set_state, buffer_view_state_, buffer_view, is_bindless);
            break;
        }
        case DescriptorClass::AccelerationStructure: {
            const auto *acc_info = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureKHR>(update->pNext);
            const auto *acc_info_nv = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureNV>(update->pNext);
            assert(acc_info || acc_info_nv);
            is_khr_ = (acc_info != NULL);
            if (is_khr_) {
                acc_ = acc_info->pAccelerationStructures[index];
                ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_),
                                is_bindless);
            } else {
                acc_nv_ = acc_info_nv->pAccelerationStructures[index];
                ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_),
                                is_bindless);
            }
            break;
        }
        default:
            break;
    }
    SetDescriptorType(update->descriptorType, buffer_size);
}

void cvdescriptorset::MutableDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                    const Descriptor *src, bool is_bindless) {
    if (src->descriptor_class == DescriptorClass::PlainSampler) {
        auto *sampler_src = static_cast<const SamplerDescriptor *>(src);
        if (!immutable_) {
            ReplaceStatePtr(set_state, sampler_state_, sampler_src->GetSharedSamplerState(), is_bindless);
        }
    } else if (src->descriptor_class == DescriptorClass::ImageSampler) {
        auto *image_src = static_cast<const ImageSamplerDescriptor *>(src);
        if (!immutable_) {
            ReplaceStatePtr(set_state, sampler_state_, image_src->GetSharedSamplerState(), is_bindless);
        }
        image_layout_ = image_src->GetImageLayout();
        ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState(), is_bindless);
    } else if (src->descriptor_class == DescriptorClass::Image) {
        auto *image_src = static_cast<const ImageDescriptor *>(src);
        image_layout_ = image_src->GetImageLayout();
        ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState(), is_bindless);
    } else if (src->descriptor_class == DescriptorClass::TexelBuffer) {
        ReplaceStatePtr(set_state, buffer_view_state_, static_cast<const TexelDescriptor *>(src)->GetSharedBufferViewState(),
                        is_bindless);
    } else if (src->descriptor_class == DescriptorClass::GeneralBuffer) {
        const auto buff_desc = static_cast<const BufferDescriptor *>(src);
        offset_ = buff_desc->GetOffset();
        range_ = buff_desc->GetRange();
        ReplaceStatePtr(set_state, buffer_state_, buff_desc->GetSharedBufferState(), is_bindless);
    } else if (src->descriptor_class == DescriptorClass::AccelerationStructure) {
        auto acc_desc = static_cast<const AccelerationStructureDescriptor *>(src);
        if (is_khr_) {
            acc_ = acc_desc->GetAccelerationStructure();
            ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_),
                            is_bindless);
        } else {
            acc_nv_ = acc_desc->GetAccelerationStructureNV();
            ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_),
                            is_bindless);
        }
    } else if (src->descriptor_class == DescriptorClass::Mutable) {
        auto active_class = DescriptorTypeToClass(src->active_descriptor_type);
        switch (active_class) {
            case PlainSampler: {
                auto *sampler_src = static_cast<const MutableDescriptor *>(src);
                if (!immutable_) {
                    ReplaceStatePtr(set_state, sampler_state_, sampler_src->GetSharedSamplerState(), is_bindless);
                }
            } break;
            case ImageSampler: {
                auto *image_src = static_cast<const MutableDescriptor *>(src);
                if (!immutable_) {
                    ReplaceStatePtr(set_state, sampler_state_, image_src->GetSharedSamplerState(), is_bindless);
                }
                image_layout_ = image_src->GetImageLayout();
                ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState(), is_bindless);
            } break;
            case Image: {
                auto *image_src = static_cast<const MutableDescriptor *>(src);
                image_layout_ = image_src->GetImageLayout();
                ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState(), is_bindless);
            } break;
            case GeneralBuffer: {
                const auto buff_desc = static_cast<const MutableDescriptor *>(src);
                offset_ = buff_desc->GetOffset();
                range_ = buff_desc->GetRange();
                ReplaceStatePtr(set_state, buffer_state_, buff_desc->GetSharedBufferState(), is_bindless);
            } break;
            case TexelBuffer: {
                ReplaceStatePtr(set_state, buffer_view_state_,
                                static_cast<const MutableDescriptor *>(src)->GetSharedBufferViewState(), is_bindless);
            } break;
            case AccelerationStructure: {
                auto acc_desc = static_cast<const MutableDescriptor *>(src);
                if (is_khr_) {
                    acc_ = acc_desc->GetAccelerationStructure();
                    ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_),
                                    is_bindless);
                } else {
                    acc_nv_ = acc_desc->GetAccelerationStructureNV();
                    ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_),
                                    is_bindless);
                }
            } break;
            default:
                break;
        }
    }
}
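// Like ImageDescriptor::UpdateDrawState, but only when the currently active type is image-backed:
// record the layout the bound image view is expected to be in when this command buffer executes.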
void cvdescriptorset::MutableDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
    auto active_class = DescriptorTypeToClass(active_descriptor_type);
    if (active_class == Image || active_class == ImageSampler) {
        if (image_view_state_) {
            dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *image_view_state_, image_layout_);
        }
    }
}

bool cvdescriptorset::MutableDescriptor::AddParent(BASE_NODE *base_node) {
    bool result = false;
    auto active_class = DescriptorTypeToClass(active_descriptor_type);
    switch (active_class) {
        case PlainSampler:
            if (sampler_state_) {
                result |= sampler_state_->AddParent(base_node);
            }
            break;
        case ImageSampler:
            if (sampler_state_) {
                result |= sampler_state_->AddParent(base_node);
            }
            if (image_view_state_) {
                // |= so a successful sampler AddParent above is not discarded
                result |= image_view_state_->AddParent(base_node);
            }
            break;
        case TexelBuffer:
            if (buffer_view_state_) {
                result |= buffer_view_state_->AddParent(base_node);
            }
            break;
        case Image:
            if (image_view_state_) {
                result |= image_view_state_->AddParent(base_node);
            }
            break;
        case GeneralBuffer:
            if (buffer_state_) {
                result |= buffer_state_->AddParent(base_node);
            }
            break;
        case AccelerationStructure:
            if (acc_state_) {
                result |= acc_state_->AddParent(base_node);
            }
            if (acc_state_nv_) {
                result |= acc_state_nv_->AddParent(base_node);
            }
            break;
        default:
            break;
    }
    return result;
}

void cvdescriptorset::MutableDescriptor::RemoveParent(BASE_NODE *base_node) {
    if (sampler_state_) {
        sampler_state_->RemoveParent(base_node);
    }
    if (image_view_state_) {
        image_view_state_->RemoveParent(base_node);
    }
    if (buffer_view_state_) {
        buffer_view_state_->RemoveParent(base_node);
    }
    if (buffer_state_) {
        buffer_state_->RemoveParent(base_node);
    }
    if (acc_state_) {
        acc_state_->RemoveParent(base_node);
    }
    if (acc_state_nv_) {
        acc_state_nv_->RemoveParent(base_node);
    }
}
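// A mutable descriptor is invalid when a state object backing its currently active descriptor
// type is missing or has been destroyed.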
bool cvdescriptorset::MutableDescriptor::Invalid() const {
    switch (DescriptorTypeToClass(active_descriptor_type)) {
        case PlainSampler:
            return !sampler_state_ || sampler_state_->Destroyed();

        case ImageSampler:
            return !sampler_state_ || sampler_state_->Invalid() || !image_view_state_ || image_view_state_->Invalid();

        case TexelBuffer:
            return !buffer_view_state_ || buffer_view_state_->Invalid();

        case Image:
            return !image_view_state_ || image_view_state_->Invalid();

        case GeneralBuffer:
            return !buffer_state_ || buffer_state_->Invalid();

        case AccelerationStructure:
            if (is_khr_) {
                return !acc_state_ || acc_state_->Invalid();
            } else {
                return !acc_state_nv_ || acc_state_nv_->Invalid();
            }
        default:
            return false;
    }
}

// This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for the updated
// sets, and then calls their respective Perform[Write|Copy]Update functions.
// Prerequisite: ValidateUpdateDescriptorSets() should be called and return "false" prior to calling
// PerformUpdateDescriptorSets() with the same set of updates.
// This is split from the validate code to allow validation prior to calling down the chain, and then state update after
// calling down the chain.
void cvdescriptorset::PerformUpdateDescriptorSets(ValidationStateTracker *dev_data, uint32_t write_count,
                                                  const VkWriteDescriptorSet *p_wds, uint32_t copy_count,
                                                  const VkCopyDescriptorSet *p_cds) {
    // Write updates first
    uint32_t i = 0;
    for (i = 0; i < write_count; ++i) {
        auto dest_set = p_wds[i].dstSet;
        auto set_node = dev_data->Get<cvdescriptorset::DescriptorSet>(dest_set);
        if (set_node) {
            set_node->PerformWriteUpdate(dev_data, &p_wds[i]);
        }
    }
    // Now copy updates
    for (i = 0; i < copy_count; ++i) {
        auto dst_set = p_cds[i].dstSet;
        auto src_set = p_cds[i].srcSet;
        auto src_node = dev_data->Get<cvdescriptorset::DescriptorSet>(src_set);
        auto dst_node = dev_data->Get<cvdescriptorset::DescriptorSet>(dst_set);
        if (src_node && dst_node) {
            dst_node->PerformCopyUpdate(dev_data, &p_cds[i], src_node.get());
        }
    }
}
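
// Illustrative sketch (not part of the layer): the validate/record split described in the comment
// above, as a hypothetical interception point might drive it. Apart from the two functions named
// in that comment, the call-down helper and its arguments are assumed for illustration.
//
//     bool skip = ValidateUpdateDescriptorSets(write_count, p_wds, copy_count, p_cds, "vkUpdateDescriptorSets");
//     if (!skip) {
//         CallDownTheChain(device, write_count, p_wds, copy_count, p_cds);  // hypothetical dispatch
//         cvdescriptorset::PerformUpdateDescriptorSets(dev_data, write_count, p_wds, copy_count, p_cds);
//     }

// Returns the binding requirements for this draw: when the set holds many descriptors, the map is
// filtered down to the bindings worth re-validating; otherwise the original map is returned as-is.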
const BindingReqMap &cvdescriptorset::PrefilterBindRequestMap::FilteredMap(const CMD_BUFFER_STATE &cb_state,
                                                                           const PIPELINE_STATE &pipeline) {
    if (IsManyDescriptors()) {
        filtered_map_.reset(new BindingReqMap);
        descriptor_set_.FilterBindingReqs(cb_state, pipeline, orig_map_, filtered_map_.get());
        return *filtered_map_;
    }
    return orig_map_;
}