/* Copyright (c) 2015-2022 The Khronos Group Inc.
 * Copyright (c) 2015-2022 Valve Corporation
 * Copyright (c) 2015-2022 LunarG, Inc.
 * Copyright (C) 2015-2022 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 *         John Zulauf <jzulauf@lunarg.com>
 *         Jeremy Kniager <jeremyk@lunarg.com>
 *         Jeremy Gebben <jeremyg@lunarg.com>
 */

#include "descriptor_sets.h"
#include "cmd_buffer_state.h"
static DESCRIPTOR_POOL_STATE::TypeCountMap GetMaxTypeCounts(const VkDescriptorPoolCreateInfo *create_info) {
    DESCRIPTOR_POOL_STATE::TypeCountMap counts;
    // Collect maximums per descriptor type.
    for (uint32_t i = 0; i < create_info->poolSizeCount; ++i) {
        const auto &pool_size = create_info->pPoolSizes[i];
        uint32_t type = static_cast<uint32_t>(pool_size.type);
        // The same descriptor type can appear several times in pPoolSizes; the counts are additive
        counts[type] += pool_size.descriptorCount;
    }
    return counts;
}
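
// Illustrative sketch (not part of the layer): duplicate types in pPoolSizes are additive,
// so a pool created with
//     VkDescriptorPoolSize pool_sizes[] = {
//         {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 4},
//         {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2},
//     };
// yields counts[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] == 6 from GetMaxTypeCounts(), matching the
// spec rule that the pool provides storage for the total number of descriptors of each type.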

DESCRIPTOR_POOL_STATE::DESCRIPTOR_POOL_STATE(ValidationStateTracker *dev, const VkDescriptorPool pool,
                                             const VkDescriptorPoolCreateInfo *pCreateInfo)
    : BASE_NODE(pool, kVulkanObjectTypeDescriptorPool),
      maxSets(pCreateInfo->maxSets),
      createInfo(pCreateInfo),
      maxDescriptorTypeCount(GetMaxTypeCounts(pCreateInfo)),
      available_sets_(pCreateInfo->maxSets),
      available_counts_(maxDescriptorTypeCount),
      dev_data_(dev) {}

void DESCRIPTOR_POOL_STATE::Allocate(const VkDescriptorSetAllocateInfo *alloc_info, const VkDescriptorSet *descriptor_sets,
                                     const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto guard = WriteLock();
    // Account for sets and individual descriptors allocated from pool
    available_sets_ -= alloc_info->descriptorSetCount;
    for (const auto &req : ds_data->required_descriptors_by_type) {
        available_counts_[req.first] -= req.second;
    }

    const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], this, ds_data->layout_nodes[i],
                                                                       variable_count, dev_data_);
        sets_.emplace(descriptor_sets[i], new_ds.get());
        dev_data_->Add(std::move(new_ds));
    }
}

void DESCRIPTOR_POOL_STATE::Free(uint32_t count, const VkDescriptorSet *descriptor_sets) {
    auto guard = WriteLock();
    // Update available descriptor sets in pool
    available_sets_ += count;

    // For each freed descriptor set, add its resources back into the pool as available and remove from pool and device data
    for (uint32_t i = 0; i < count; ++i) {
        if (descriptor_sets[i] != VK_NULL_HANDLE) {
            auto iter = sets_.find(descriptor_sets[i]);
            assert(iter != sets_.end());
            auto *set_state = iter->second;
            const auto &layout = set_state->Layout();
            uint32_t type_index = 0, descriptor_count = 0;
            for (uint32_t j = 0; j < layout.GetBindingCount(); ++j) {
                type_index = static_cast<uint32_t>(layout.GetTypeFromIndex(j));
                descriptor_count = layout.GetDescriptorCountFromIndex(j);
                available_counts_[type_index] += descriptor_count;
            }
            dev_data_->Destroy<cvdescriptorset::DescriptorSet>(iter->first);
            sets_.erase(iter);
        }
    }
}
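
// Note: vkFreeDescriptorSets() allows VK_NULL_HANDLE elements in pDescriptorSets, which is why
// Free() skips them above; only non-null handles return their descriptors to available_counts_.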

void DESCRIPTOR_POOL_STATE::Reset() {
    auto guard = WriteLock();
    // For every set allocated from this pool, destroy its state object and remove it from the pool's map
    for (auto entry : sets_) {
        dev_data_->Destroy<cvdescriptorset::DescriptorSet>(entry.first);
    }
    sets_.clear();
    // Reset available counts for each type and available sets for this pool
    available_counts_ = maxDescriptorTypeCount;
    available_sets_ = maxSets;
}

bool DESCRIPTOR_POOL_STATE::InUse() const {
    auto guard = ReadLock();
    for (const auto &entry : sets_) {
        const auto *ds = entry.second;
        if (ds && ds->InUse()) {
            return true;
        }
    }
    return false;
}

void DESCRIPTOR_POOL_STATE::Destroy() {
    Reset();
    BASE_NODE::Destroy();
}

// ExtendedBinding collects a VkDescriptorSetLayoutBinding and any extended
// state that comes from a different array/structure so they can stay together
// while being sorted by binding number.
struct ExtendedBinding {
    ExtendedBinding(const VkDescriptorSetLayoutBinding *l, VkDescriptorBindingFlags f) : layout_binding(l), binding_flags(f) {}

    const VkDescriptorSetLayoutBinding *layout_binding;
    VkDescriptorBindingFlags binding_flags;
};

struct BindingNumCmp {
    bool operator()(const ExtendedBinding &a, const ExtendedBinding &b) const {
        return a.layout_binding->binding < b.layout_binding->binding;
    }
};
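
// Using BindingNumCmp as the comparator of a std::set both orders ExtendedBindings by binding
// number and drops duplicate binding numbers: std::set::emplace ignores an element that compares
// equivalent to one already present, so the first occurrence of a binding number wins.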

cvdescriptorset::DescriptorClass cvdescriptorset::DescriptorTypeToClass(VkDescriptorType type) {
    switch (type) {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
            return PlainSampler;
        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            return ImageSampler;
        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            return Image;
        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            return TexelBuffer;
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
            return GeneralBuffer;
        case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
            return InlineUniform;
        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:
            return AccelerationStructure;
        case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
            return Mutable;
        default:
            break;
    }
    return NoDescriptorClass;
}

using DescriptorSet = cvdescriptorset::DescriptorSet;
using DescriptorSetLayout = cvdescriptorset::DescriptorSetLayout;
using DescriptorSetLayoutDef = cvdescriptorset::DescriptorSetLayoutDef;
using DescriptorSetLayoutId = cvdescriptorset::DescriptorSetLayoutId;

// Canonical dictionary of DescriptorSetLayoutDef (without any handle/device specific information)
cvdescriptorset::DescriptorSetLayoutDict descriptor_set_layout_dict;

DescriptorSetLayoutId GetCanonicalId(const VkDescriptorSetLayoutCreateInfo *p_create_info) {
    return descriptor_set_layout_dict.look_up(DescriptorSetLayoutDef(p_create_info));
}
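
// Because layout definitions are canonicalized through this dictionary, two VkDescriptorSetLayout
// handles created from identical create info share a single DescriptorSetLayoutDef, and layout
// compatibility (see DescriptorSetLayout::IsCompatible below) reduces to comparing the shared
// definition pointers instead of re-comparing every binding.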

// Construct DescriptorSetLayoutDef instance from given create info
// Proactively reserve and resize as possible, as the reallocation was visible in profiling
cvdescriptorset::DescriptorSetLayoutDef::DescriptorSetLayoutDef(const VkDescriptorSetLayoutCreateInfo *p_create_info)
    : flags_(p_create_info->flags), binding_count_(0), descriptor_count_(0), dynamic_descriptor_count_(0) {
    const auto *flags_create_info = LvlFindInChain<VkDescriptorSetLayoutBindingFlagsCreateInfo>(p_create_info->pNext);

    binding_type_stats_ = {0, 0};
    std::set<ExtendedBinding, BindingNumCmp> sorted_bindings;
    const uint32_t input_bindings_count = p_create_info->bindingCount;
    // Sort the input bindings in binding number order, eliminating duplicates
    for (uint32_t i = 0; i < input_bindings_count; i++) {
        VkDescriptorBindingFlags flags = 0;
        if (flags_create_info && flags_create_info->bindingCount == p_create_info->bindingCount) {
            flags = flags_create_info->pBindingFlags[i];
        }
        sorted_bindings.emplace(p_create_info->pBindings + i, flags);
    }

    const auto *mutable_descriptor_type_create_info = LvlFindInChain<VkMutableDescriptorTypeCreateInfoEXT>(p_create_info->pNext);
    if (mutable_descriptor_type_create_info) {
        mutable_types_.resize(mutable_descriptor_type_create_info->mutableDescriptorTypeListCount);
        for (uint32_t i = 0; i < mutable_descriptor_type_create_info->mutableDescriptorTypeListCount; ++i) {
            const auto &list = mutable_descriptor_type_create_info->pMutableDescriptorTypeLists[i];
            mutable_types_[i].reserve(list.descriptorTypeCount);
            for (uint32_t j = 0; j < list.descriptorTypeCount; ++j) {
                mutable_types_[i].push_back(list.pDescriptorTypes[j]);
            }
            std::sort(mutable_types_[i].begin(), mutable_types_[i].end());
        }
    }

    // Store the create info in the sorted order from above
    uint32_t index = 0;
    binding_count_ = static_cast<uint32_t>(sorted_bindings.size());
    bindings_.reserve(binding_count_);
    binding_flags_.reserve(binding_count_);
    binding_to_index_map_.reserve(binding_count_);
    for (const auto &input_binding : sorted_bindings) {
        // Add to binding and map, s.t. it is robust to invalid duplication of binding_num
        const auto binding_num = input_binding.layout_binding->binding;
        binding_to_index_map_[binding_num] = index++;
        bindings_.emplace_back(input_binding.layout_binding);
        auto &binding_info = bindings_.back();
        binding_flags_.emplace_back(input_binding.binding_flags);

        descriptor_count_ += binding_info.descriptorCount;
        if (binding_info.descriptorCount > 0) {
            non_empty_bindings_.insert(binding_num);
        }

        if (IsDynamicDescriptor(binding_info.descriptorType)) {
            dynamic_descriptor_count_ += binding_info.descriptorCount;
        }

        // Get stats depending on descriptor type for caching later
        if (IsBufferDescriptor(binding_info.descriptorType)) {
            if (IsDynamicDescriptor(binding_info.descriptorType)) {
                binding_type_stats_.dynamic_buffer_count++;
            } else {
                binding_type_stats_.non_dynamic_buffer_count++;
            }
        }
    }
    assert(bindings_.size() == binding_count_);
    assert(binding_flags_.size() == binding_count_);
    uint32_t global_index = 0;
    global_index_range_.reserve(binding_count_);
    // Vector order is finalized, so build the global index range for each binding
    for (uint32_t i = 0; i < binding_count_; ++i) {
        auto final_index = global_index + bindings_[i].descriptorCount;
        global_index_range_.emplace_back(global_index, final_index);
        global_index = final_index;
    }
}
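
// Illustrative sketch (not part of the layer): for a layout with bindings
//     {binding = 0, descriptorCount = 3} and {binding = 2, descriptorCount = 2}
// the loop above produces half-open global index ranges [0, 3) and [3, 5), i.e. descriptors are
// numbered consecutively across bindings in sorted binding order, with no gap for the missing
// binding 1.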

size_t cvdescriptorset::DescriptorSetLayoutDef::hash() const {
    hash_util::HashCombiner hc;
    hc << flags_;
    hc.Combine(bindings_);
    hc.Combine(binding_flags_);
    return hc.Value();
}

John Zulauf1f8174b2018-02-16 12:58:37 -0700268// Return valid index or "end" i.e. binding_count_;
269// The asserts in "Get" are reduced to the set where no valid answer(like null or 0) could be given
270// Common code for all binding lookups.
271uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetIndexFromBinding(uint32_t binding) const {
272 const auto &bi_itr = binding_to_index_map_.find(binding);
273 if (bi_itr != binding_to_index_map_.cend()) return bi_itr->second;
274 return GetBindingCount();
275}
276VkDescriptorSetLayoutBinding const *cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorSetLayoutBindingPtrFromIndex(
277 const uint32_t index) const {
278 if (index >= bindings_.size()) return nullptr;
279 return bindings_[index].ptr();
280}
281// Return descriptorCount for given index, 0 if index is unavailable
282uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorCountFromIndex(const uint32_t index) const {
283 if (index >= bindings_.size()) return 0;
284 return bindings_[index].descriptorCount;
285}
286// For the given index, return descriptorType
287VkDescriptorType cvdescriptorset::DescriptorSetLayoutDef::GetTypeFromIndex(const uint32_t index) const {
288 assert(index < bindings_.size());
289 if (index < bindings_.size()) return bindings_[index].descriptorType;
290 return VK_DESCRIPTOR_TYPE_MAX_ENUM;
291}
292// For the given index, return stageFlags
293VkShaderStageFlags cvdescriptorset::DescriptorSetLayoutDef::GetStageFlagsFromIndex(const uint32_t index) const {
294 assert(index < bindings_.size());
295 if (index < bindings_.size()) return bindings_[index].stageFlags;
296 return VkShaderStageFlags(0);
297}
Jeff Bolzfdf96072018-04-10 14:32:18 -0500298// Return binding flags for given index, 0 if index is unavailable
Mike Schuchardt2df08912020-12-15 16:28:09 -0800299VkDescriptorBindingFlags cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorBindingFlagsFromIndex(const uint32_t index) const {
Jeff Bolzfdf96072018-04-10 14:32:18 -0500300 if (index >= binding_flags_.size()) return 0;
301 return binding_flags_[index];
302}
John Zulauf1f8174b2018-02-16 12:58:37 -0700303
John Zulauf7705bfc2019-06-10 09:52:04 -0600304const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromIndex(uint32_t index) const {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700305 const static IndexRange k_invalid_range = {0xFFFFFFFF, 0xFFFFFFFF};
306 if (index >= binding_flags_.size()) return k_invalid_range;
John Zulauf7705bfc2019-06-10 09:52:04 -0600307 return global_index_range_[index];
John Zulauf1f8174b2018-02-16 12:58:37 -0700308}
309
John Zulauf7705bfc2019-06-10 09:52:04 -0600310// For the given binding, return the global index range (half open)
311// As start and end are often needed in pairs, get both with a single lookup.
John Zulauf1f8174b2018-02-16 12:58:37 -0700312const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromBinding(
313 const uint32_t binding) const {
John Zulauf7705bfc2019-06-10 09:52:04 -0600314 uint32_t index = GetIndexFromBinding(binding);
315 return GetGlobalIndexRangeFromIndex(index);
John Zulauf1f8174b2018-02-16 12:58:37 -0700316}
317
John Zulauf1f8174b2018-02-16 12:58:37 -0700318// Move to next valid binding having a non-zero binding count
319uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetNextValidBinding(const uint32_t binding) const {
320 auto it = non_empty_bindings_.upper_bound(binding);
321 assert(it != non_empty_bindings_.cend());
322 if (it != non_empty_bindings_.cend()) return *it;
323 return GetMaxBinding() + 1;
324}
325// For given index, return ptr to ImmutableSampler array
326VkSampler const *cvdescriptorset::DescriptorSetLayoutDef::GetImmutableSamplerPtrFromIndex(const uint32_t index) const {
327 if (index < bindings_.size()) {
328 return bindings_[index].pImmutableSamplers;
329 }
330 return nullptr;
331}
John Zulauf9ce3b252019-06-06 15:20:22 -0600332
bool cvdescriptorset::DescriptorSetLayoutDef::IsTypeMutable(const VkDescriptorType type, uint32_t binding) const {
    if (binding < mutable_types_.size()) {
        if (mutable_types_[binding].size() > 0) {
            for (const auto mutable_type : mutable_types_[binding]) {
                if (type == mutable_type) {
                    return true;
                }
            }
            return false;
        }
    }
    // If mutableDescriptorTypeListCount is zero, or if the VkMutableDescriptorTypeCreateInfoEXT structure is not included in the
    // pNext chain, the VkMutableDescriptorTypeListEXT for each element is considered to be zero or NULL for each member.
    return false;
}

const std::vector<std::vector<VkDescriptorType>> &cvdescriptorset::DescriptorSetLayoutDef::GetMutableTypes() const {
    return mutable_types_;
}

const std::vector<VkDescriptorType> &cvdescriptorset::DescriptorSetLayoutDef::GetMutableTypes(uint32_t binding) const {
    if (binding >= mutable_types_.size()) {
        static const std::vector<VkDescriptorType> empty = {};
        return empty;
    }
    return mutable_types_[binding];
}

bool cvdescriptorset::DescriptorSetLayoutDef::IsNextBindingConsistent(const uint32_t binding) const {
    if (!binding_to_index_map_.count(binding + 1)) return false;
    auto const &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.end()) {
        const auto &next_bi_itr = binding_to_index_map_.find(binding + 1);
        if (next_bi_itr != binding_to_index_map_.end()) {
            auto type = bindings_[bi_itr->second].descriptorType;
            auto stage_flags = bindings_[bi_itr->second].stageFlags;
            auto immut_samp = bindings_[bi_itr->second].pImmutableSamplers ? true : false;
            auto flags = binding_flags_[bi_itr->second];
            if ((type != bindings_[next_bi_itr->second].descriptorType) ||
                (stage_flags != bindings_[next_bi_itr->second].stageFlags) ||
                (immut_samp != (bindings_[next_bi_itr->second].pImmutableSamplers ? true : false)) ||
                (flags != binding_flags_[next_bi_itr->second])) {
                return false;
            }
            return true;
        }
    }
    return false;
}
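
// IsNextBindingConsistent() implements the "consecutive binding updates" rule: a write or copy
// whose descriptorCount overflows binding N may roll over into binding N+1 only if both bindings
// have the same descriptor type, stage flags, binding flags, and immutable-sampler usage.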

// If our layout is compatible with rh_ds_layout, return true.
bool cvdescriptorset::DescriptorSetLayout::IsCompatible(DescriptorSetLayout const *rh_ds_layout) const {
    bool compatible = (this == rh_ds_layout) || (GetLayoutDef() == rh_ds_layout->GetLayoutDef());
    return compatible;
}

// The DescriptorSetLayout stores the per handle data for a descriptor set layout, and references the common definition for the
// handle invariant portion
cvdescriptorset::DescriptorSetLayout::DescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo *p_create_info,
                                                          const VkDescriptorSetLayout layout)
    : BASE_NODE(layout, kVulkanObjectTypeDescriptorSetLayout), layout_id_(GetCanonicalId(p_create_info)) {}

void cvdescriptorset::AllocateDescriptorSetsData::Init(uint32_t count) { layout_nodes.resize(count); }

cvdescriptorset::DescriptorSet::DescriptorSet(const VkDescriptorSet set, DESCRIPTOR_POOL_STATE *pool_state,
                                              const std::shared_ptr<DescriptorSetLayout const> &layout, uint32_t variable_count,
                                              const cvdescriptorset::DescriptorSet::StateTracker *state_data)
    : BASE_NODE(set, kVulkanObjectTypeDescriptorSet),
      some_update_(false),
      pool_state_(pool_state),
      layout_(layout),
      state_data_(state_data),
      variable_count_(variable_count),
      change_count_(0) {
    // For each binding, create default descriptors of given type
    auto binding_count = layout_->GetBindingCount();
    bindings_.reserve(binding_count);
    bindings_store_.resize(binding_count);
    auto free_binding = bindings_store_.data();
    for (uint32_t i = 0; i < binding_count; ++i) {
        auto create_info = layout_->GetDescriptorSetLayoutBindingPtrFromIndex(i);
        assert(create_info);
        uint32_t descriptor_count = create_info->descriptorCount;
        auto flags = layout_->GetDescriptorBindingFlagsFromIndex(i);
        if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT) {
            descriptor_count = variable_count;
        }
        auto type = layout_->GetTypeFromIndex(i);
        auto descriptor_class = DescriptorTypeToClass(type);
        switch (descriptor_class) {
            case PlainSampler: {
                auto binding = MakeBinding<SamplerBinding>(free_binding++, *create_info, descriptor_count, flags);
                auto immut = layout_->GetImmutableSamplerPtrFromIndex(i);
                if (immut) {
                    for (uint32_t di = 0; di < descriptor_count; ++di) {
                        auto sampler = state_data->GetConstCastShared<SAMPLER_STATE>(immut[di]);
                        if (sampler) {
                            some_update_ = true;  // Immutable samplers are updated at creation
                            binding->updated[di] = true;
                            binding->descriptors[di].SetSamplerState(std::move(sampler));
                        }
                    }
                }
                bindings_.push_back(std::move(binding));
                break;
            }
            case ImageSampler: {
                auto binding = MakeBinding<ImageSamplerBinding>(free_binding++, *create_info, descriptor_count, flags);
                auto immut = layout_->GetImmutableSamplerPtrFromIndex(i);
                if (immut) {
                    for (uint32_t di = 0; di < descriptor_count; ++di) {
                        auto sampler = state_data->GetConstCastShared<SAMPLER_STATE>(immut[di]);
                        if (sampler) {
                            some_update_ = true;  // Immutable samplers are updated at creation
                            binding->updated[di] = true;
                            binding->descriptors[di].SetSamplerState(std::move(sampler));
                        }
                    }
                }
                bindings_.push_back(std::move(binding));
                break;
            }
            // ImageDescriptors
            case Image: {
                bindings_.push_back(MakeBinding<ImageBinding>(free_binding++, *create_info, descriptor_count, flags));
                break;
            }
            case TexelBuffer: {
                bindings_.push_back(MakeBinding<TexelBinding>(free_binding++, *create_info, descriptor_count, flags));
                break;
            }
            case GeneralBuffer: {
                auto binding = MakeBinding<BufferBinding>(free_binding++, *create_info, descriptor_count, flags);
                if (IsDynamicDescriptor(type)) {
                    for (uint32_t di = 0; di < descriptor_count; ++di) {
                        dynamic_offset_idx_to_descriptor_list_.push_back({i, di});
                    }
                }
                bindings_.push_back(std::move(binding));
                break;
            }
            case InlineUniform: {
                bindings_.push_back(MakeBinding<InlineUniformBinding>(free_binding++, *create_info, descriptor_count, flags));
                break;
            }
            case AccelerationStructure: {
                bindings_.push_back(
                    MakeBinding<AccelerationStructureBinding>(free_binding++, *create_info, descriptor_count, flags));
                break;
            }
            case Mutable: {
                bindings_.push_back(MakeBinding<MutableBinding>(free_binding++, *create_info, descriptor_count, flags));
                break;
            }
            default:
                assert(0);  // Bad descriptor type specified
                break;
        }
    }
}
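
// Storage note: bindings_store_ owns the binding objects contiguously, and free_binding above acts
// as a bump pointer into that storage; bindings_ holds pointers into it in layout (binding index)
// order, so a DescriptorSet never reallocates its bindings after construction.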

void cvdescriptorset::DescriptorSet::LinkChildNodes() {
    // Connect child node(s), which cannot safely be done in the constructor.
    for (auto &binding : bindings_) {
        binding->AddParent(this);
    }
}

void cvdescriptorset::DescriptorSet::Destroy() {
    for (auto &binding : bindings_) {
        binding->RemoveParent(this);
    }
    BASE_NODE::Destroy();
}
// Loop through the write updates to do for a push descriptor set, ignoring dstSet
void cvdescriptorset::DescriptorSet::PerformPushDescriptorsUpdate(ValidationStateTracker *dev_data, uint32_t write_count,
                                                                  const VkWriteDescriptorSet *p_wds) {
    assert(IsPushDescriptor());
    for (uint32_t i = 0; i < write_count; i++) {
        PerformWriteUpdate(dev_data, &p_wds[i]);
    }

    push_descriptor_set_writes.clear();
    push_descriptor_set_writes.reserve(static_cast<std::size_t>(write_count));
    for (uint32_t i = 0; i < write_count; i++) {
        push_descriptor_set_writes.push_back(safe_VkWriteDescriptorSet(&p_wds[i]));
    }
}
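
// The safe_VkWriteDescriptorSet copies above deep-copy each write (including its pNext chain), so
// the recorded push_descriptor_set_writes stay valid for later validation even after the caller's
// VkWriteDescriptorSet array goes out of scope.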

// Perform write update in given update struct
void cvdescriptorset::DescriptorSet::PerformWriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *update) {
    // Perform update on a per-binding basis as consecutive updates roll over to next binding
    auto descriptors_remaining = update->descriptorCount;
    auto iter = FindDescriptor(update->dstBinding, update->dstArrayElement);
    assert(!iter.AtEnd());
    auto &orig_binding = iter.CurrentBinding();

    // Verify next consecutive binding matches type, stage flags & immutable sampler use and if AtEnd
    for (uint32_t i = 0; i < descriptors_remaining; ++i, ++iter) {
        if (iter.AtEnd() || !orig_binding.IsConsistent(iter.CurrentBinding())) {
            break;
        }
        iter->WriteUpdate(this, state_data_, update, i, iter.CurrentBinding().IsBindless());
        iter.updated(true);
    }
    if (update->descriptorCount) {
        some_update_ = true;
        change_count_++;
    }

    if (!IsPushDescriptor() && !(orig_binding.binding_flags & (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
                                                               VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) {
        Invalidate(false);
    }
}
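
// Illustrative sketch (not part of the layer): given binding 0 with descriptorCount == 2 and a
// consistent binding 1, a write of
//     VkWriteDescriptorSet write = {};
//     write.dstBinding = 0;
//     write.dstArrayElement = 1;
//     write.descriptorCount = 3;
// updates element 1 of binding 0, then rolls over to elements 0 and 1 of binding 1, which is why
// the loop above advances a descriptor iterator rather than indexing a single binding.
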
// Perform Copy update
void cvdescriptorset::DescriptorSet::PerformCopyUpdate(ValidationStateTracker *dev_data, const VkCopyDescriptorSet *update,
                                                       const DescriptorSet *src_set) {
    auto src_iter = src_set->FindDescriptor(update->srcBinding, update->srcArrayElement);
    auto dst_iter = FindDescriptor(update->dstBinding, update->dstArrayElement);
    // Update parameters all look good so perform update
    for (uint32_t i = 0; i < update->descriptorCount; ++i, ++src_iter, ++dst_iter) {
        auto &src = *src_iter;
        auto &dst = *dst_iter;
        if (src_iter.updated()) {
            dst.CopyUpdate(this, state_data_, &src, src_iter.CurrentBinding().IsBindless());
            some_update_ = true;
            change_count_++;
            dst_iter.updated(true);
        } else {
            dst_iter.updated(false);
        }
    }

    if (!(layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) &
          (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) {
        Invalidate(false);
    }
}

// Update the drawing state for the affected descriptors.
// Set cb_node to this set and this set to cb_node.
// Add the bindings of the descriptor
// Set the layout based on the current descriptor layout (will mask subsequent layer mismatch errors)
// TODO: Modify the UpdateDrawState virtual functions to *only* set initial layout and not change layouts
// Prereq: This should be called for a set that has been confirmed to be active for the given cb_node, meaning it's going
// to be used in a draw by the given cb_node
void cvdescriptorset::DescriptorSet::UpdateDrawState(ValidationStateTracker *device_data, CMD_BUFFER_STATE *cb_node,
                                                     CMD_TYPE cmd_type, const PIPELINE_STATE *pipe,
                                                     const BindingReqMap &binding_req_map) {
    // Descriptor UpdateDrawState only calls image layout validation callbacks. If that is disabled, skip the entire loop.
    if (device_data->disabled[image_layout_validation]) {
        return;
    }

    // For the active slots, use set# to look up descriptorSet from boundDescriptorSets, and bind all of that descriptor set's
    // resources
    CMD_BUFFER_STATE::CmdDrawDispatchInfo cmd_info = {};
    for (const auto &binding_req_pair : binding_req_map) {
        auto binding = GetBinding(binding_req_pair.first);
        assert(binding);

        // We aren't validating descriptors created with PARTIALLY_BOUND or UPDATE_AFTER_BIND, so don't record state
        if (binding->IsBindless()) {
            if (!(binding->binding_flags & VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT)) {
                cmd_info.binding_infos.emplace_back(binding_req_pair);
            }
            continue;
        }
        switch (binding->descriptor_class) {
            case Image: {
                auto *image_binding = static_cast<ImageBinding *>(binding);
                for (uint32_t i = 0; i < image_binding->count; ++i) {
                    image_binding->descriptors[i].UpdateDrawState(device_data, cb_node);
                }
                break;
            }
            case ImageSampler: {
                auto *image_binding = static_cast<ImageSamplerBinding *>(binding);
                for (uint32_t i = 0; i < image_binding->count; ++i) {
                    image_binding->descriptors[i].UpdateDrawState(device_data, cb_node);
                }
                break;
            }
            case Mutable: {
                auto *mutable_binding = static_cast<MutableBinding *>(binding);
                for (uint32_t i = 0; i < mutable_binding->count; ++i) {
                    mutable_binding->descriptors[i].UpdateDrawState(device_data, cb_node);
                }
                break;
            }
            default:
                break;
        }
    }

    if (cmd_info.binding_infos.size() > 0) {
        cmd_info.cmd_type = cmd_type;
        if (cb_node->activeFramebuffer) {
            cmd_info.framebuffer = cb_node->activeFramebuffer->framebuffer();
            cmd_info.attachments = cb_node->active_attachments;
            cmd_info.subpasses = cb_node->active_subpasses;
        }
        cb_node->validate_descriptorsets_in_queuesubmit[GetSet()].emplace_back(cmd_info);
    }
}
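
// Design note: bindings created with UPDATE_AFTER_BIND (and the other "bindless" flags) cannot be
// validated at record time because their contents may legally change until submission; the loop
// above records them in cmd_info instead, and validate_descriptorsets_in_queuesubmit replays that
// information at vkQueueSubmit() time. PARTIALLY_BOUND bindings are skipped entirely since their
// descriptors are not required to be valid.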

void cvdescriptorset::DescriptorSet::FilterOneBindingReq(const BindingReqMap::value_type &binding_req_pair, BindingReqMap *out_req,
                                                         const TrackedBindings &bindings, uint32_t limit) {
    if (bindings.size() < limit) {
        const auto it = bindings.find(binding_req_pair.first);
        if (it == bindings.cend()) out_req->emplace(binding_req_pair);
    }
}

void cvdescriptorset::DescriptorSet::FilterBindingReqs(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline,
                                                       const BindingReqMap &in_req, BindingReqMap *out_req) const {
    // For const cleanliness we have to find in the maps...
    const auto validated_it = cb_state.descriptorset_cache.find(this);
    if (validated_it == cb_state.descriptorset_cache.end()) {
        // We have nothing validated, copy in to out
        for (const auto &binding_req_pair : in_req) {
            out_req->emplace(binding_req_pair);
        }
        return;
    }
    const auto &validated = validated_it->second;

    const auto image_sample_version_it = validated.image_samplers.find(&pipeline);
    const VersionedBindings *image_sample_version = nullptr;
    if (image_sample_version_it != validated.image_samplers.cend()) {
        image_sample_version = &(image_sample_version_it->second);
    }
    const auto &dynamic_buffers = validated.dynamic_buffers;
    const auto &non_dynamic_buffers = validated.non_dynamic_buffers;
    const auto &stats = layout_->GetBindingTypeStats();
    for (const auto &binding_req_pair : in_req) {
        auto binding = binding_req_pair.first;
        VkDescriptorSetLayoutBinding const *layout_binding = layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding);
        if (!layout_binding) {
            continue;
        }
        // Caching criteria differs per type.
        // If image layouts have changed, the image descriptors need to be validated against them.
        if (IsBufferDescriptor(layout_binding->descriptorType)) {
            if (IsDynamicDescriptor(layout_binding->descriptorType)) {
                FilterOneBindingReq(binding_req_pair, out_req, dynamic_buffers, stats.dynamic_buffer_count);
            } else {
                FilterOneBindingReq(binding_req_pair, out_req, non_dynamic_buffers, stats.non_dynamic_buffer_count);
            }
        } else {
            // This is rather crude, as the changed layouts may not impact the bound descriptors,
            // but the simple "versioning" is a simple "dirt" test.
            bool stale = true;
            if (image_sample_version) {
                const auto version_it = image_sample_version->find(binding);
                if (version_it != image_sample_version->cend() && (version_it->second == cb_state.image_layout_change_count)) {
                    stale = false;
                }
            }
            if (stale) {
                out_req->emplace(binding_req_pair);
            }
        }
    }
}
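
// Caching scheme: buffer bindings are cheap to track, so a binding is skipped once it appears in
// the validated set (bounded by the layout's buffer counts). Image and sampler bindings instead
// carry a version stamp and are re-validated whenever the command buffer's
// image_layout_change_count has advanced since the binding was last checked.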

void cvdescriptorset::DescriptorSet::UpdateValidationCache(CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline,
                                                           const BindingReqMap &updated_bindings) {
    auto &validated = cb_state.descriptorset_cache[this];

    auto &image_sample_version = validated.image_samplers[&pipeline];
    auto &dynamic_buffers = validated.dynamic_buffers;
    auto &non_dynamic_buffers = validated.non_dynamic_buffers;
    for (const auto &binding_req_pair : updated_bindings) {
        auto binding = binding_req_pair.first;
        VkDescriptorSetLayoutBinding const *layout_binding = layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding);
        if (!layout_binding) {
            continue;
        }
        // Caching criteria differs per type.
        if (IsBufferDescriptor(layout_binding->descriptorType)) {
            if (IsDynamicDescriptor(layout_binding->descriptorType)) {
                dynamic_buffers.emplace(binding);
            } else {
                non_dynamic_buffers.emplace(binding);
            }
        } else {
            // Save the layout change version...
            image_sample_version[binding] = cb_state.image_layout_change_count;
        }
    }
}

// Helper template to change shared pointer members of a Descriptor, while
// correctly managing links to the parent DescriptorSet.
// src and dst are shared pointers.
template <typename T>
static void ReplaceStatePtr(DescriptorSet *set_state, T &dst, const T &src, bool is_bindless) {
    if (dst && !is_bindless) {
        dst->RemoveParent(set_state);
    }
    dst = src;
    // For descriptor bindings with UPDATE_AFTER_BIND or PARTIALLY_BOUND only set the object as a child, but not the descriptor as
    // a parent, so that destroying the object won't invalidate the descriptor
    if (dst && !is_bindless) {
        dst->AddParent(set_state);
    }
}
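
// Illustrative sketch (not part of the layer): a typical call site swaps the tracked state object
// while keeping parent/child links consistent, e.g.
//     ReplaceStatePtr(set_state, buffer_state_, new_buffer_state, /*is_bindless=*/false);
// unhooks the old BUFFER_STATE from the set, installs the new one, and re-hooks it so that
// destroying the buffer can invalidate the descriptor set (unless the binding is bindless).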

void cvdescriptorset::SamplerDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                     const VkWriteDescriptorSet *update, const uint32_t index, bool is_bindless) {
    if (!immutable_) {
        ReplaceStatePtr(set_state, sampler_state_, dev_data->GetConstCastShared<SAMPLER_STATE>(update->pImageInfo[index].sampler),
                        is_bindless);
    }
}

void cvdescriptorset::SamplerDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                    const Descriptor *src, bool is_bindless) {
    if (src->GetClass() == Mutable) {
        auto *sampler_src = static_cast<const MutableDescriptor *>(src);
        if (!immutable_) {
            ReplaceStatePtr(set_state, sampler_state_, sampler_src->GetSharedSamplerState(), is_bindless);
        }
        return;
    }
    auto *sampler_src = static_cast<const SamplerDescriptor *>(src);
    if (!immutable_) {
        ReplaceStatePtr(set_state, sampler_state_, sampler_src->sampler_state_, is_bindless);
    }
}

void cvdescriptorset::ImageSamplerDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                          const VkWriteDescriptorSet *update, const uint32_t index,
                                                          bool is_bindless) {
    const auto &image_info = update->pImageInfo[index];
    if (!immutable_) {
        ReplaceStatePtr(set_state, sampler_state_, dev_data->GetConstCastShared<SAMPLER_STATE>(image_info.sampler), is_bindless);
    }
    image_layout_ = image_info.imageLayout;
    ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView),
                    is_bindless);
}

void cvdescriptorset::ImageSamplerDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                         const Descriptor *src, bool is_bindless) {
    if (src->GetClass() == Mutable) {
        auto *image_src = static_cast<const MutableDescriptor *>(src);
        if (!immutable_) {
            ReplaceStatePtr(set_state, sampler_state_, image_src->GetSharedSamplerState(), is_bindless);
        }
        ImageDescriptor::CopyUpdate(set_state, dev_data, src, is_bindless);
        return;
    }
    auto *image_src = static_cast<const ImageSamplerDescriptor *>(src);
    if (!immutable_) {
        ReplaceStatePtr(set_state, sampler_state_, image_src->sampler_state_, is_bindless);
    }
    ImageDescriptor::CopyUpdate(set_state, dev_data, src, is_bindless);
}
Jeremy Gebben5570abe2021-05-16 18:35:13 -0600797void cvdescriptorset::ImageDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
Jeremy Gebben1b9fdb82022-06-15 15:31:32 -0600798 const VkWriteDescriptorSet *update, const uint32_t index, bool is_bindless) {
Tobin Ehlis56a30942016-05-19 08:00:00 -0600799 const auto &image_info = update->pImageInfo[index];
Tobin Ehlis300888c2016-05-18 13:43:26 -0600800 image_layout_ = image_info.imageLayout;
ziga-lunargb4f3f7d2022-04-17 15:36:03 +0200801 ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView),
Jeremy Gebben1b9fdb82022-06-15 15:31:32 -0600802 is_bindless);
Tobin Ehlis0a43bde2016-05-03 08:31:08 -0600803}
804
Jeremy Gebben5570abe2021-05-16 18:35:13 -0600805void cvdescriptorset::ImageDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
ziga-lunargb4f3f7d2022-04-17 15:36:03 +0200806 const Descriptor *src, bool is_bindless) {
Jeremy Gebbenc08f6502022-07-15 09:55:06 -0600807 if (src->GetClass() == Mutable) {
zigada4b1512021-11-28 15:53:06 +0100808 auto *image_src = static_cast<const MutableDescriptor *>(src);
809
810 image_layout_ = image_src->GetImageLayout();
ziga-lunargb4f3f7d2022-04-17 15:36:03 +0200811 ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState(), is_bindless);
Tony-LunarG80358322021-04-16 07:58:13 -0600812 return;
813 }
Jeremy Gebben5570abe2021-05-16 18:35:13 -0600814 auto *image_src = static_cast<const ImageDescriptor *>(src);
Tony-LunarG80358322021-04-16 07:58:13 -0600815
Jeremy Gebben5570abe2021-05-16 18:35:13 -0600816 image_layout_ = image_src->image_layout_;
ziga-lunargb4f3f7d2022-04-17 15:36:03 +0200817 ReplaceStatePtr(set_state, image_view_state_, image_src->image_view_state_, is_bindless);
Tobin Ehlis0a43bde2016-05-03 08:31:08 -0600818}
819
John Zulauffbf3c202019-07-17 14:57:14 -0600820void cvdescriptorset::ImageDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
Tobin Ehlis81e46372016-08-17 13:33:44 -0600821 // Add binding for image
Jeff Bolzfaffeb32019-10-04 12:47:16 -0500822 auto iv_state = GetImageViewState();
Tobin Ehlis8b26a382016-09-14 08:02:49 -0600823 if (iv_state) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -0600824 dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *iv_state, image_layout_);
Jeff Bolz148d94e2018-12-13 21:25:56 -0600825 }
Tobin Ehlis8020eea2016-08-17 11:10:41 -0600826}

void cvdescriptorset::BufferDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                    const VkWriteDescriptorSet *update, const uint32_t index, bool is_bindless) {
    const auto &buffer_info = update->pBufferInfo[index];
    offset_ = buffer_info.offset;
    range_ = buffer_info.range;
    auto buffer_state = dev_data->GetConstCastShared<BUFFER_STATE>(buffer_info.buffer);
    ReplaceStatePtr(set_state, buffer_state_, buffer_state, is_bindless);
}

void cvdescriptorset::BufferDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                   const Descriptor *src, bool is_bindless) {
    if (src->GetClass() == Mutable) {
        const auto buff_desc = static_cast<const MutableDescriptor *>(src);
        offset_ = buff_desc->GetOffset();
        range_ = buff_desc->GetRange();
        ReplaceStatePtr(set_state, buffer_state_, buff_desc->GetSharedBufferState(), is_bindless);
        return;
    }
    const auto buff_desc = static_cast<const BufferDescriptor *>(src);
    offset_ = buff_desc->offset_;
    range_ = buff_desc->range_;
    ReplaceStatePtr(set_state, buffer_state_, buff_desc->buffer_state_, is_bindless);
}

void cvdescriptorset::TexelDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                   const VkWriteDescriptorSet *update, const uint32_t index, bool is_bindless) {
    auto buffer_view = dev_data->GetConstCastShared<BUFFER_VIEW_STATE>(update->pTexelBufferView[index]);
    ReplaceStatePtr(set_state, buffer_view_state_, buffer_view, is_bindless);
}

void cvdescriptorset::TexelDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                  const Descriptor *src, bool is_bindless) {
    if (src->GetClass() == Mutable) {
        ReplaceStatePtr(set_state, buffer_view_state_, static_cast<const MutableDescriptor *>(src)->GetSharedBufferViewState(),
                        is_bindless);
        return;
    }
    ReplaceStatePtr(set_state, buffer_view_state_, static_cast<const TexelDescriptor *>(src)->buffer_view_state_, is_bindless);
}
867
void cvdescriptorset::AccelerationStructureDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                                   const VkWriteDescriptorSet *update, const uint32_t index,
                                                                   bool is_bindless) {
    const auto *acc_info = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureKHR>(update->pNext);
    const auto *acc_info_nv = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureNV>(update->pNext);
    assert(acc_info || acc_info_nv);
    is_khr_ = (acc_info != nullptr);
    if (is_khr_) {
        acc_ = acc_info->pAccelerationStructures[index];
        ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_), is_bindless);
    } else {
        acc_nv_ = acc_info_nv->pAccelerationStructures[index];
        ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_), is_bindless);
    }
}

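// Copy either the KHR or the NV handle and its tracked state, depending on which flavor this
// destination descriptor holds.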
void cvdescriptorset::AccelerationStructureDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                                  const Descriptor *src, bool is_bindless) {
    if (src->GetClass() == Mutable) {
        auto acc_desc = static_cast<const MutableDescriptor *>(src);
        if (is_khr_) {
            acc_ = acc_desc->GetAccelerationStructure();
            ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_),
                            is_bindless);
        } else {
            acc_nv_ = acc_desc->GetAccelerationStructureNV();
            ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_),
                            is_bindless);
        }
        return;
    }
    auto acc_desc = static_cast<const AccelerationStructureDescriptor *>(src);
    if (is_khr_) {
        acc_ = acc_desc->acc_;
        ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_), is_bindless);
    } else {
        acc_nv_ = acc_desc->acc_nv_;
        ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_), is_bindless);
    }
}

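// A MutableDescriptor starts with no meaningful active type; members for every possible
// descriptor class are default-initialized until the first write determines the type.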
cvdescriptorset::MutableDescriptor::MutableDescriptor()
    : Descriptor(),
      buffer_size_(0),
      active_descriptor_type_(VK_DESCRIPTOR_TYPE_MUTABLE_EXT),
      immutable_(false),
      image_layout_(VK_IMAGE_LAYOUT_UNDEFINED),
      offset_(0),
      range_(0),
      is_khr_(false),
      acc_(VK_NULL_HANDLE) {}

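// A write to a mutable descriptor can carry any supported descriptor type, so dispatch on the
// descriptor class of the incoming write, capture the state that class needs, and record the
// new active type (plus the bound buffer size, where one applies).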
void cvdescriptorset::MutableDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                     const VkWriteDescriptorSet *update, const uint32_t index, bool is_bindless) {
    VkDeviceSize buffer_size = 0;
    switch (DescriptorTypeToClass(update->descriptorType)) {
        case DescriptorClass::PlainSampler:
            if (!immutable_) {
                ReplaceStatePtr(set_state, sampler_state_,
                                dev_data->GetConstCastShared<SAMPLER_STATE>(update->pImageInfo[index].sampler), is_bindless);
            }
            break;
        case DescriptorClass::ImageSampler: {
            const auto &image_info = update->pImageInfo[index];
            if (!immutable_) {
                ReplaceStatePtr(set_state, sampler_state_, dev_data->GetConstCastShared<SAMPLER_STATE>(image_info.sampler),
                                is_bindless);
            }
            image_layout_ = image_info.imageLayout;
            ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView),
                            is_bindless);
            break;
        }
        case DescriptorClass::Image: {
            const auto &image_info = update->pImageInfo[index];
            image_layout_ = image_info.imageLayout;
            ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView),
                            is_bindless);
            break;
        }
        case DescriptorClass::GeneralBuffer: {
            const auto &buffer_info = update->pBufferInfo[index];
            offset_ = buffer_info.offset;
            range_ = buffer_info.range;
            // Use the buffer for this index; update->pBufferInfo->buffer would always be element 0.
            const auto buffer_state = dev_data->GetConstCastShared<BUFFER_STATE>(buffer_info.buffer);
            if (buffer_state) {
                buffer_size = buffer_state->createInfo.size;
            }
            ReplaceStatePtr(set_state, buffer_state_, buffer_state, is_bindless);
            break;
        }
        case DescriptorClass::TexelBuffer: {
            const auto buffer_view = dev_data->GetConstCastShared<BUFFER_VIEW_STATE>(update->pTexelBufferView[index]);
            if (buffer_view) {
                buffer_size = buffer_view->buffer_state->createInfo.size;
            }
            ReplaceStatePtr(set_state, buffer_view_state_, buffer_view, is_bindless);
            break;
        }
        case DescriptorClass::AccelerationStructure: {
            const auto *acc_info = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureKHR>(update->pNext);
            const auto *acc_info_nv = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureNV>(update->pNext);
            assert(acc_info || acc_info_nv);
            is_khr_ = (acc_info != nullptr);
            if (is_khr_) {
                acc_ = acc_info->pAccelerationStructures[index];
                ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_),
                                is_bindless);
            } else {
                acc_nv_ = acc_info_nv->pAccelerationStructures[index];
                ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_),
                                is_bindless);
            }
            break;
        }
        default:
            break;
    }
    SetDescriptorType(update->descriptorType, buffer_size);
}

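// Copying into a mutable descriptor mirrors WriteUpdate: branch on the source descriptor's
// class, pull over the state that class carries, and adopt the source's active type. A Mutable
// source additionally requires a nested dispatch on its currently active class.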
void cvdescriptorset::MutableDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
                                                    const Descriptor *src, bool is_bindless) {
    if (src->GetClass() == DescriptorClass::PlainSampler) {
        auto *sampler_src = static_cast<const SamplerDescriptor *>(src);
        if (!immutable_) {
            ReplaceStatePtr(set_state, sampler_state_, sampler_src->GetSharedSamplerState(), is_bindless);
        }
    } else if (src->GetClass() == DescriptorClass::ImageSampler) {
        auto *image_src = static_cast<const ImageSamplerDescriptor *>(src);
        if (!immutable_) {
            ReplaceStatePtr(set_state, sampler_state_, image_src->GetSharedSamplerState(), is_bindless);
        }

        image_layout_ = image_src->GetImageLayout();
        ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState(), is_bindless);
    } else if (src->GetClass() == DescriptorClass::Image) {
        auto *image_src = static_cast<const ImageDescriptor *>(src);

        image_layout_ = image_src->GetImageLayout();
        ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState(), is_bindless);
    } else if (src->GetClass() == DescriptorClass::TexelBuffer) {
        ReplaceStatePtr(set_state, buffer_view_state_, static_cast<const TexelDescriptor *>(src)->GetSharedBufferViewState(),
                        is_bindless);
    } else if (src->GetClass() == DescriptorClass::GeneralBuffer) {
        const auto buff_desc = static_cast<const BufferDescriptor *>(src);
        offset_ = buff_desc->GetOffset();
        range_ = buff_desc->GetRange();
        ReplaceStatePtr(set_state, buffer_state_, buff_desc->GetSharedBufferState(), is_bindless);
    } else if (src->GetClass() == DescriptorClass::AccelerationStructure) {
        auto acc_desc = static_cast<const AccelerationStructureDescriptor *>(src);
        if (is_khr_) {
            acc_ = acc_desc->GetAccelerationStructure();
            ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_),
                            is_bindless);
        } else {
            acc_nv_ = acc_desc->GetAccelerationStructureNV();
            ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_),
                            is_bindless);
        }
    } else if (src->GetClass() == DescriptorClass::Mutable) {
        const auto mutable_src = static_cast<const MutableDescriptor *>(src);
        auto active_class = DescriptorTypeToClass(mutable_src->ActiveType());
        switch (active_class) {
            case PlainSampler: {
                if (!immutable_) {
                    ReplaceStatePtr(set_state, sampler_state_, mutable_src->GetSharedSamplerState(), is_bindless);
                }
            } break;
            case ImageSampler: {
                if (!immutable_) {
                    ReplaceStatePtr(set_state, sampler_state_, mutable_src->GetSharedSamplerState(), is_bindless);
                }

                image_layout_ = mutable_src->GetImageLayout();
                ReplaceStatePtr(set_state, image_view_state_, mutable_src->GetSharedImageViewState(), is_bindless);
            } break;
            case Image: {
                image_layout_ = mutable_src->GetImageLayout();
                ReplaceStatePtr(set_state, image_view_state_, mutable_src->GetSharedImageViewState(), is_bindless);
            } break;
            case GeneralBuffer: {
                offset_ = mutable_src->GetOffset();
                range_ = mutable_src->GetRange();
                ReplaceStatePtr(set_state, buffer_state_, mutable_src->GetSharedBufferState(), is_bindless);
            } break;
            case TexelBuffer: {
                ReplaceStatePtr(set_state, buffer_view_state_, mutable_src->GetSharedBufferViewState(), is_bindless);
            } break;
            case AccelerationStructure: {
                if (is_khr_) {
                    acc_ = mutable_src->GetAccelerationStructure();
                    ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_),
                                    is_bindless);
                } else {
                    acc_nv_ = mutable_src->GetAccelerationStructureNV();
                    ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_),
                                    is_bindless);
                }
            } break;
            default:
                break;
        }
        SetDescriptorType(mutable_src->ActiveType(), mutable_src->GetBufferSize());
    }
}

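// Only image-class descriptors affect draw-time layout tracking; forward the bound image view
// and its expected layout to the command buffer's initial-layout callback.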
void cvdescriptorset::MutableDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
    auto active_class = DescriptorTypeToClass(active_descriptor_type_);
    if (active_class == Image || active_class == ImageSampler) {
        if (image_view_state_) {
            dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *image_view_state_, image_layout_);
        }
    }
}

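// Parent links let state objects invalidate the descriptor set when they are destroyed.
// AddParent registers only the objects used by the currently active class, while RemoveParent
// conservatively unlinks every state object this descriptor may have held.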
bool cvdescriptorset::MutableDescriptor::AddParent(BASE_NODE *base_node) {
    bool result = false;
    auto active_class = DescriptorTypeToClass(active_descriptor_type_);
    switch (active_class) {
        case PlainSampler:
            if (sampler_state_) {
                result |= sampler_state_->AddParent(base_node);
            }
            break;
        case ImageSampler:
            if (sampler_state_) {
                result |= sampler_state_->AddParent(base_node);
            }
            if (image_view_state_) {
                result |= image_view_state_->AddParent(base_node);
            }
            break;
        case TexelBuffer:
            if (buffer_view_state_) {
                result |= buffer_view_state_->AddParent(base_node);
            }
            break;
        case Image:
            if (image_view_state_) {
                result |= image_view_state_->AddParent(base_node);
            }
            break;
        case GeneralBuffer:
            if (buffer_state_) {
                result |= buffer_state_->AddParent(base_node);
            }
            break;
        case AccelerationStructure:
            if (acc_state_) {
                result |= acc_state_->AddParent(base_node);
            }
            if (acc_state_nv_) {
                result |= acc_state_nv_->AddParent(base_node);
            }
            break;
        default:
            break;
    }
    return result;
}

void cvdescriptorset::MutableDescriptor::RemoveParent(BASE_NODE *base_node) {
    if (sampler_state_) {
        sampler_state_->RemoveParent(base_node);
    }
    if (image_view_state_) {
        image_view_state_->RemoveParent(base_node);
    }
    if (buffer_view_state_) {
        buffer_view_state_->RemoveParent(base_node);
    }
    if (buffer_state_) {
        buffer_state_->RemoveParent(base_node);
    }
    if (acc_state_) {
        acc_state_->RemoveParent(base_node);
    }
    if (acc_state_nv_) {
        acc_state_nv_->RemoveParent(base_node);
    }
}

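// A mutable descriptor is invalid if any state object required by its active class is missing
// or has been destroyed; classes that track no state report false.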
bool cvdescriptorset::MutableDescriptor::Invalid() const {
    switch (ActiveClass()) {
        case PlainSampler:
            return !sampler_state_ || sampler_state_->Destroyed();

        case ImageSampler:
            return !sampler_state_ || sampler_state_->Invalid() || !image_view_state_ || image_view_state_->Invalid();

        case TexelBuffer:
            return !buffer_view_state_ || buffer_view_state_->Invalid();

        case Image:
            return !image_view_state_ || image_view_state_->Invalid();

        case GeneralBuffer:
            return !buffer_state_ || buffer_state_->Invalid();

        case AccelerationStructure:
            if (is_khr_) {
                return !acc_state_ || acc_state_->Invalid();
            } else {
                return !acc_state_nv_ || acc_state_nv_->Invalid();
            }
        default:
            return false;
    }
}

// Helper that iterates over a set of write and copy updates, pulls the DescriptorSet* for each
// updated set, and calls its PerformWriteUpdate() or PerformCopyUpdate() as appropriate.
// Prerequisite: ValidateUpdateDescriptorSets() should be called with the same set of updates,
// and return false, before PerformUpdateDescriptorSets() is called.
// Validation is split from the update so the layer can validate before calling down the chain,
// then record the state update after the call returns.
void cvdescriptorset::PerformUpdateDescriptorSets(ValidationStateTracker *dev_data, uint32_t write_count,
                                                  const VkWriteDescriptorSet *p_wds, uint32_t copy_count,
                                                  const VkCopyDescriptorSet *p_cds) {
    // Write updates first
    uint32_t i = 0;
    for (i = 0; i < write_count; ++i) {
        auto dest_set = p_wds[i].dstSet;
        auto set_node = dev_data->Get<cvdescriptorset::DescriptorSet>(dest_set);
        if (set_node) {
            set_node->PerformWriteUpdate(dev_data, &p_wds[i]);
        }
    }
    // Now copy updates
    for (i = 0; i < copy_count; ++i) {
        auto dst_set = p_cds[i].dstSet;
        auto src_set = p_cds[i].srcSet;
        auto src_node = dev_data->Get<cvdescriptorset::DescriptorSet>(src_set);
        auto dst_node = dev_data->Get<cvdescriptorset::DescriptorSet>(dst_set);
        if (src_node && dst_node) {
            dst_node->PerformCopyUpdate(dev_data, &p_cds[i], src_node.get());
        }
    }
}
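
// Illustrative sketch (not part of this file): the kind of application-side update that flows
// through the validation chain and ends up in PerformUpdateDescriptorSets(). The device,
// buffer, and descriptor_set handles are placeholders.
//
//   VkDescriptorBufferInfo buffer_info{buffer, 0, VK_WHOLE_SIZE};
//   VkWriteDescriptorSet write = {};
//   write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
//   write.dstSet = descriptor_set;
//   write.dstBinding = 0;
//   write.descriptorCount = 1;
//   write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//   write.pBufferInfo = &buffer_info;
//   vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);

// For sets with many descriptors, build (and cache) a map holding only the bindings this
// pipeline actually requires; for small sets, filtering costs more than it saves, so the
// original map is returned unchanged.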
const BindingReqMap &cvdescriptorset::PrefilterBindRequestMap::FilteredMap(const CMD_BUFFER_STATE &cb_state,
                                                                           const PIPELINE_STATE &pipeline) {
    if (IsManyDescriptors()) {
        filtered_map_.reset(new BindingReqMap);
        descriptor_set_.FilterBindingReqs(cb_state, pipeline, orig_map_, filtered_map_.get());
        return *filtered_map_;
    }
    return orig_map_;
}