/* Copyright (c) 2015-2021 The Khronos Group Inc.
 * Copyright (c) 2015-2021 Valve Corporation
 * Copyright (c) 2015-2021 LunarG, Inc.
 * Copyright (C) 2015-2021 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 *         John Zulauf <jzulauf@lunarg.com>
 *         Jeremy Kniager <jeremyk@lunarg.com>
 */

#include "chassis.h"
#include "core_validation_error_enums.h"
#include "core_validation.h"
#include "descriptor_sets.h"
#include "hash_vk_types.h"
#include "vk_enum_string_helper.h"
#include "vk_safe_struct.h"
#include "vk_typemap_helper.h"
#include "buffer_validation.h"
#include <sstream>
#include <algorithm>
#include <array>
#include <memory>

static DESCRIPTOR_POOL_STATE::TypeCountMap GetMaxTypeCounts(const VkDescriptorPoolCreateInfo *create_info) {
    DESCRIPTOR_POOL_STATE::TypeCountMap counts;
    // Collect maximums per descriptor type.
    for (uint32_t i = 0; i < create_info->poolSizeCount; ++i) {
        const auto &pool_size = create_info->pPoolSizes[i];
        uint32_t type = static_cast<uint32_t>(pool_size.type);
        // The same descriptor type can appear several times
        counts[type] += pool_size.descriptorCount;
    }
    return counts;
}
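// For illustration (example values, not from a caller): a pool created with pool sizes
// { {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 4}, {VK_DESCRIPTOR_TYPE_SAMPLER, 2}, {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1} }
// yields counts[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] == 5 and counts[VK_DESCRIPTOR_TYPE_SAMPLER] == 2,
// since repeated types accumulate.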

DESCRIPTOR_POOL_STATE::DESCRIPTOR_POOL_STATE(ValidationStateTracker *dev, const VkDescriptorPool pool,
                                             const VkDescriptorPoolCreateInfo *pCreateInfo)
    : BASE_NODE(pool, kVulkanObjectTypeDescriptorPool),
      dev_data(dev),
      maxSets(pCreateInfo->maxSets),
      availableSets(pCreateInfo->maxSets),
      createInfo(pCreateInfo),
      maxDescriptorTypeCount(GetMaxTypeCounts(pCreateInfo)),
      availableDescriptorTypeCount(maxDescriptorTypeCount) {}
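// The "available" counters above start out equal to their maximums; Allocate() and Free() below adjust them as sets are
// carved out of and returned to the pool, and Reset() restores them to their creation-time values.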

void DESCRIPTOR_POOL_STATE::Allocate(const VkDescriptorSetAllocateInfo *alloc_info, const VkDescriptorSet *descriptor_sets,
                                     const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    // Account for sets and individual descriptors allocated from pool
    availableSets -= alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        availableDescriptorTypeCount[it->first] -= it->second;
    }

    const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], this, ds_data->layout_nodes[i],
                                                                       variable_count, dev_data);
        sets.emplace(descriptor_sets[i], new_ds.get());
        dev_data->Add(std::move(new_ds));
    }
}
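// Note for Allocate() above: when VkDescriptorSetVariableDescriptorCountAllocateInfo is absent from the pNext chain (or
// its descriptorSetCount does not match the allocation), the variable descriptor count recorded for every set is 0.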

void DESCRIPTOR_POOL_STATE::Free(uint32_t count, const VkDescriptorSet *descriptor_sets) {
    // Update available descriptor sets in pool
    availableSets += count;

    // For each freed descriptor set, add its resources back into the pool as available and remove it from the pool and device data
    for (uint32_t i = 0; i < count; ++i) {
        if (descriptor_sets[i] != VK_NULL_HANDLE) {
            auto iter = sets.find(descriptor_sets[i]);
            assert(iter != sets.end());
            auto *set_state = iter->second;
            uint32_t type_index = 0, descriptor_count = 0;
            for (uint32_t j = 0; j < set_state->GetBindingCount(); ++j) {
                type_index = static_cast<uint32_t>(set_state->GetTypeFromIndex(j));
                descriptor_count = set_state->GetDescriptorCountFromIndex(j);
                availableDescriptorTypeCount[type_index] += descriptor_count;
            }
            dev_data->Destroy<cvdescriptorset::DescriptorSet>(iter->first);
            sets.erase(iter);
        }
    }
}
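// VK_NULL_HANDLE elements are skipped above, matching vkFreeDescriptorSets(), where null handles in pDescriptorSets are
// ignored.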

void DESCRIPTOR_POOL_STATE::Reset() {
    // For every set allocated from this pool, destroy its state object, then clear the pool's set map
    for (auto entry : sets) {
        dev_data->Destroy<cvdescriptorset::DescriptorSet>(entry.first);
    }
    sets.clear();
    // Reset available count for each type and available sets for this pool
    availableDescriptorTypeCount = maxDescriptorTypeCount;
    availableSets = maxSets;
}

void DESCRIPTOR_POOL_STATE::Destroy() {
    Reset();
    BASE_NODE::Destroy();
}

// ExtendedBinding collects a VkDescriptorSetLayoutBinding and any extended
// state that comes from a different array/structure so they can stay together
// while being sorted by binding number.
struct ExtendedBinding {
    ExtendedBinding(const VkDescriptorSetLayoutBinding *l, VkDescriptorBindingFlags f) : layout_binding(l), binding_flags(f) {}

    const VkDescriptorSetLayoutBinding *layout_binding;
    VkDescriptorBindingFlags binding_flags;
};

struct BindingNumCmp {
    bool operator()(const ExtendedBinding &a, const ExtendedBinding &b) const {
        return a.layout_binding->binding < b.layout_binding->binding;
    }
};
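// Because std::set uses BindingNumCmp for equivalence as well as ordering, inserting two ExtendedBindings with the same
// binding number keeps only the first; this is how the DescriptorSetLayoutDef constructor below sorts its input while
// eliminating duplicate binding numbers.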

using DescriptorSet = cvdescriptorset::DescriptorSet;
using DescriptorSetLayout = cvdescriptorset::DescriptorSetLayout;
using DescriptorSetLayoutDef = cvdescriptorset::DescriptorSetLayoutDef;
using DescriptorSetLayoutId = cvdescriptorset::DescriptorSetLayoutId;

// Canonical dictionary of DescriptorSetLayoutDef (without any handle/device specific information)
cvdescriptorset::DescriptorSetLayoutDict descriptor_set_layout_dict;

DescriptorSetLayoutId GetCanonicalId(const VkDescriptorSetLayoutCreateInfo *p_create_info) {
    return descriptor_set_layout_dict.look_up(DescriptorSetLayoutDef(p_create_info));
}
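// Create infos that reduce to the same definition (after the sorting/de-duplication done by DescriptorSetLayoutDef) map
// to the same DescriptorSetLayoutId, which is what lets DescriptorSetLayout::IsCompatible() below compare canonical defs
// by pointer instead of re-walking the bindings.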

// Construct DescriptorSetLayoutDef instance from given create info
// Proactively reserve and resize as possible, as the reallocation was visible in profiling
cvdescriptorset::DescriptorSetLayoutDef::DescriptorSetLayoutDef(const VkDescriptorSetLayoutCreateInfo *p_create_info)
    : flags_(p_create_info->flags), binding_count_(0), descriptor_count_(0), dynamic_descriptor_count_(0) {
    const auto *flags_create_info = LvlFindInChain<VkDescriptorSetLayoutBindingFlagsCreateInfo>(p_create_info->pNext);

    binding_type_stats_ = {0, 0};
    std::set<ExtendedBinding, BindingNumCmp> sorted_bindings;
    const uint32_t input_bindings_count = p_create_info->bindingCount;
    // Sort the input bindings in binding number order, eliminating duplicates
    for (uint32_t i = 0; i < input_bindings_count; i++) {
        VkDescriptorBindingFlags flags = 0;
        if (flags_create_info && flags_create_info->bindingCount == p_create_info->bindingCount) {
            flags = flags_create_info->pBindingFlags[i];
        }
        sorted_bindings.emplace(p_create_info->pBindings + i, flags);
    }

    const auto *mutable_descriptor_type_create_info = LvlFindInChain<VkMutableDescriptorTypeCreateInfoVALVE>(p_create_info->pNext);
    if (mutable_descriptor_type_create_info) {
        mutable_types_.resize(mutable_descriptor_type_create_info->mutableDescriptorTypeListCount);
        for (uint32_t i = 0; i < mutable_descriptor_type_create_info->mutableDescriptorTypeListCount; ++i) {
            const auto &list = mutable_descriptor_type_create_info->pMutableDescriptorTypeLists[i];
            mutable_types_[i].reserve(list.descriptorTypeCount);
            for (uint32_t j = 0; j < list.descriptorTypeCount; ++j) {
                mutable_types_[i].push_back(list.pDescriptorTypes[j]);
            }
            std::sort(mutable_types_[i].begin(), mutable_types_[i].end());
        }
    }

    // Store the create info in the sorted order from above
    uint32_t index = 0;
    binding_count_ = static_cast<uint32_t>(sorted_bindings.size());
    bindings_.reserve(binding_count_);
    binding_flags_.reserve(binding_count_);
    binding_to_index_map_.reserve(binding_count_);
    for (const auto &input_binding : sorted_bindings) {
        // Add to bindings_ and the binding-to-index map, s.t. it is robust to invalid duplication of binding_num
        const auto binding_num = input_binding.layout_binding->binding;
        binding_to_index_map_[binding_num] = index++;
        bindings_.emplace_back(input_binding.layout_binding);
        auto &binding_info = bindings_.back();
        binding_flags_.emplace_back(input_binding.binding_flags);

        descriptor_count_ += binding_info.descriptorCount;
        if (binding_info.descriptorCount > 0) {
            non_empty_bindings_.insert(binding_num);
        }

        if (IsDynamicDescriptor(binding_info.descriptorType)) {
            dynamic_descriptor_count_ += binding_info.descriptorCount;
        }

        // Get stats depending on descriptor type for caching later
        if (IsBufferDescriptor(binding_info.descriptorType)) {
            if (IsDynamicDescriptor(binding_info.descriptorType)) {
                binding_type_stats_.dynamic_buffer_count++;
            } else {
                binding_type_stats_.non_dynamic_buffer_count++;
            }
        }
    }
    assert(bindings_.size() == binding_count_);
    assert(binding_flags_.size() == binding_count_);
    uint32_t global_index = 0;
    global_index_range_.reserve(binding_count_);
    // Vector order is finalized, so build the global index range covered by each binding
    for (uint32_t i = 0; i < binding_count_; ++i) {
        auto final_index = global_index + bindings_[i].descriptorCount;
        global_index_range_.emplace_back(global_index, final_index);
        global_index = final_index;
    }
}
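// For illustration (example values): bindings { binding 0 with descriptorCount 3, binding 2 with descriptorCount 1,
// binding 5 with descriptorCount 2 } produce global_index_range_ = { [0,3), [3,4), [4,6) }, i.e. a flat half-open index
// space over all descriptors in the set, ordered by binding number.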

size_t cvdescriptorset::DescriptorSetLayoutDef::hash() const {
    hash_util::HashCombiner hc;
    hc << flags_;
    hc.Combine(bindings_);
    hc.Combine(binding_flags_);
    return hc.Value();
}

// Return valid index or "end" i.e. binding_count_
// The asserts in the "Get" functions are reduced to the cases where no valid answer (like null or 0) could be given
// Common code for all binding lookups.
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetIndexFromBinding(uint32_t binding) const {
    const auto &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.cend()) return bi_itr->second;
    return GetBindingCount();
}
VkDescriptorSetLayoutBinding const *cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorSetLayoutBindingPtrFromIndex(
    const uint32_t index) const {
    if (index >= bindings_.size()) return nullptr;
    return bindings_[index].ptr();
}
// Return descriptorCount for given index, 0 if index is unavailable
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorCountFromIndex(const uint32_t index) const {
    if (index >= bindings_.size()) return 0;
    return bindings_[index].descriptorCount;
}
// For the given index, return descriptorType
VkDescriptorType cvdescriptorset::DescriptorSetLayoutDef::GetTypeFromIndex(const uint32_t index) const {
    assert(index < bindings_.size());
    if (index < bindings_.size()) return bindings_[index].descriptorType;
    return VK_DESCRIPTOR_TYPE_MAX_ENUM;
}
// For the given index, return stageFlags
VkShaderStageFlags cvdescriptorset::DescriptorSetLayoutDef::GetStageFlagsFromIndex(const uint32_t index) const {
    assert(index < bindings_.size());
    if (index < bindings_.size()) return bindings_[index].stageFlags;
    return VkShaderStageFlags(0);
}
// Return binding flags for given index, 0 if index is unavailable
VkDescriptorBindingFlags cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorBindingFlagsFromIndex(const uint32_t index) const {
    if (index >= binding_flags_.size()) return 0;
    return binding_flags_[index];
}

const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromIndex(uint32_t index) const {
    const static IndexRange k_invalid_range = {0xFFFFFFFF, 0xFFFFFFFF};
    // binding_flags_ is sized to binding_count_, the same as global_index_range_, so this bounds check is equivalent
    if (index >= binding_flags_.size()) return k_invalid_range;
    return global_index_range_[index];
}

// For the given binding, return the global index range (half open)
// As start and end are often needed in pairs, get both with a single lookup.
const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromBinding(
    const uint32_t binding) const {
    uint32_t index = GetIndexFromBinding(binding);
    return GetGlobalIndexRangeFromIndex(index);
}
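// An unknown binding maps to GetBindingCount() in GetIndexFromBinding(), which then hits the k_invalid_range sentinel in
// GetGlobalIndexRangeFromIndex() rather than indexing out of bounds.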

// For given binding, return ptr to ImmutableSampler array
VkSampler const *cvdescriptorset::DescriptorSetLayoutDef::GetImmutableSamplerPtrFromBinding(const uint32_t binding) const {
    const auto &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.end()) {
        return bindings_[bi_itr->second].pImmutableSamplers;
    }
    return nullptr;
}
// Move to next valid binding having a non-zero descriptor count
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetNextValidBinding(const uint32_t binding) const {
    auto it = non_empty_bindings_.upper_bound(binding);
    assert(it != non_empty_bindings_.cend());
    if (it != non_empty_bindings_.cend()) return *it;
    return GetMaxBinding() + 1;
}
// For given index, return ptr to ImmutableSampler array
VkSampler const *cvdescriptorset::DescriptorSetLayoutDef::GetImmutableSamplerPtrFromIndex(const uint32_t index) const {
    if (index < bindings_.size()) {
        return bindings_[index].pImmutableSamplers;
    }
    return nullptr;
}

bool cvdescriptorset::DescriptorSetLayoutDef::IsTypeMutable(const VkDescriptorType type, uint32_t binding) const {
    if (binding < mutable_types_.size()) {
        if (mutable_types_[binding].size() > 0) {
            for (const auto mutable_type : mutable_types_[binding]) {
                if (type == mutable_type) {
                    return true;
                }
            }
            return false;
        }
    }
    // If mutableDescriptorTypeListCount is zero or if VkMutableDescriptorTypeCreateInfoVALVE structure is not included in the
    // pNext chain, the VkMutableDescriptorTypeListVALVE for each element is considered to be zero or NULL for each member.
    return false;
}

const std::vector<std::vector<VkDescriptorType>> &cvdescriptorset::DescriptorSetLayoutDef::GetMutableTypes() const {
    return mutable_types_;
}

const std::vector<VkDescriptorType> &cvdescriptorset::DescriptorSetLayoutDef::GetMutableTypes(uint32_t binding) const {
    if (binding >= mutable_types_.size()) {
        static const std::vector<VkDescriptorType> empty = {};
        return empty;
    }
    return mutable_types_[binding];
}
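// Note: the per-binding type lists returned by the GetMutableTypes() overloads were sorted in the DescriptorSetLayoutDef
// constructor, so two lists can be compared element-wise without normalizing them first.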

// If our layout is compatible with rh_ds_layout, return true.
bool cvdescriptorset::DescriptorSetLayout::IsCompatible(DescriptorSetLayout const *rh_ds_layout) const {
    bool compatible = (this == rh_ds_layout) || (GetLayoutDef() == rh_ds_layout->GetLayoutDef());
    return compatible;
}

// TODO: Find a way to add smarts to the autogenerated version of this
static std::string smart_string_VkShaderStageFlags(VkShaderStageFlags stage_flags) {
    if (stage_flags == VK_SHADER_STAGE_ALL) {
        return string_VkShaderStageFlagBits(VK_SHADER_STAGE_ALL);
    }

    return string_VkShaderStageFlags(stage_flags);
}
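// VK_SHADER_STAGE_ALL (0x7FFFFFFF) would otherwise be expanded by string_VkShaderStageFlags into a long list of
// individual stage bits; the special case above prints the single well-known name instead, keeping messages readable.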

// If our layout is compatible with bound_dsl, return true,
// else return false and fill in error_msg with a description of what causes the incompatibility
bool cvdescriptorset::VerifySetLayoutCompatibility(const debug_report_data *report_data, DescriptorSetLayout const *layout_dsl,
                                                   DescriptorSetLayout const *bound_dsl, std::string *error_msg) {
    // Short circuit the detailed check.
    if (layout_dsl->IsCompatible(bound_dsl)) return true;

    // Do a detailed compatibility check of this lhs def (referenced by layout_dsl), vs. the rhs (layout and def)
    // Should only be run if trivial accept has failed, and in that context should return false.
    VkDescriptorSetLayout layout_dsl_handle = layout_dsl->GetDescriptorSetLayout();
    VkDescriptorSetLayout bound_dsl_handle = bound_dsl->GetDescriptorSetLayout();
    DescriptorSetLayoutDef const *layout_ds_layout_def = layout_dsl->GetLayoutDef();
    DescriptorSetLayoutDef const *bound_ds_layout_def = bound_dsl->GetLayoutDef();

    // Check descriptor counts
    const auto bound_total_count = bound_ds_layout_def->GetTotalDescriptorCount();
    if (layout_ds_layout_def->GetTotalDescriptorCount() != bound_ds_layout_def->GetTotalDescriptorCount()) {
        std::stringstream error_str;
        error_str << report_data->FormatHandle(layout_dsl_handle) << " from pipeline layout has "
                  << layout_ds_layout_def->GetTotalDescriptorCount() << " total descriptors, but "
                  << report_data->FormatHandle(bound_dsl_handle) << ", which is bound, has " << bound_total_count
                  << " total descriptors.";
        *error_msg = error_str.str();
        return false;  // trivial fail case
    }

    // Descriptor counts match so need to go through bindings one-by-one
    // and verify that type and stageFlags match
    for (const auto &layout_binding : layout_ds_layout_def->GetBindings()) {
        // TODO : Do we also need to check immutable samplers?
        const auto bound_binding = bound_ds_layout_def->GetBindingInfoFromBinding(layout_binding.binding);
        if (layout_binding.descriptorCount != bound_binding->descriptorCount) {
            std::stringstream error_str;
            error_str << "Binding " << layout_binding.binding << " for " << report_data->FormatHandle(layout_dsl_handle)
                      << " from pipeline layout has a descriptorCount of " << layout_binding.descriptorCount << " but binding "
                      << layout_binding.binding << " for " << report_data->FormatHandle(bound_dsl_handle)
                      << ", which is bound, has a descriptorCount of " << bound_binding->descriptorCount;
            *error_msg = error_str.str();
            return false;
        } else if (layout_binding.descriptorType != bound_binding->descriptorType) {
            std::stringstream error_str;
            error_str << "Binding " << layout_binding.binding << " for " << report_data->FormatHandle(layout_dsl_handle)
                      << " from pipeline layout is type '" << string_VkDescriptorType(layout_binding.descriptorType)
                      << "' but binding " << layout_binding.binding << " for " << report_data->FormatHandle(bound_dsl_handle)
                      << ", which is bound, is type '" << string_VkDescriptorType(bound_binding->descriptorType) << "'";
            *error_msg = error_str.str();
            return false;
        } else if (layout_binding.stageFlags != bound_binding->stageFlags) {
            std::stringstream error_str;
            error_str << "Binding " << layout_binding.binding << " for " << report_data->FormatHandle(layout_dsl_handle)
                      << " from pipeline layout has stageFlags " << smart_string_VkShaderStageFlags(layout_binding.stageFlags)
                      << " but binding " << layout_binding.binding << " for " << report_data->FormatHandle(bound_dsl_handle)
                      << ", which is bound, has stageFlags " << smart_string_VkShaderStageFlags(bound_binding->stageFlags);
            *error_msg = error_str.str();
            return false;
        }
    }

    const auto &ds_layout_flags = layout_ds_layout_def->GetBindingFlags();
    const auto &bound_layout_flags = bound_ds_layout_def->GetBindingFlags();
    if (bound_layout_flags != ds_layout_flags) {
        std::stringstream error_str;
        assert(ds_layout_flags.size() == bound_layout_flags.size());
        size_t i;
        for (i = 0; i < ds_layout_flags.size(); i++) {
            if (ds_layout_flags[i] != bound_layout_flags[i]) break;
        }
        error_str << report_data->FormatHandle(layout_dsl_handle)
                  << " from pipeline layout does not have the same binding flags at binding " << i << " ( "
                  << string_VkDescriptorBindingFlagsEXT(ds_layout_flags[i]) << " ) as "
                  << report_data->FormatHandle(bound_dsl_handle) << " ( "
                  << string_VkDescriptorBindingFlagsEXT(bound_layout_flags[i]) << " ), which is bound";
        *error_msg = error_str.str();
        return false;
    }

    // No detailed check should succeed if the trivial check failed -- or the dictionary has failed somehow.
    bool compatible = true;
    assert(!compatible);
    return compatible;
}
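// Falling out of the detailed check without finding a specific mismatch means the trivial IsCompatible() test and the
// per-binding comparison above disagree (e.g. the canonical dictionary failed); the assert makes that loud in debug
// builds, while release builds fall through to treating the layouts as compatible.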

bool cvdescriptorset::DescriptorSetLayoutDef::IsNextBindingConsistent(const uint32_t binding) const {
    if (!binding_to_index_map_.count(binding + 1)) return false;
    auto const &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.end()) {
        const auto &next_bi_itr = binding_to_index_map_.find(binding + 1);
        if (next_bi_itr != binding_to_index_map_.end()) {
            auto type = bindings_[bi_itr->second].descriptorType;
            auto stage_flags = bindings_[bi_itr->second].stageFlags;
            auto immut_samp = bindings_[bi_itr->second].pImmutableSamplers ? true : false;
            auto flags = binding_flags_[bi_itr->second];
            if ((type != bindings_[next_bi_itr->second].descriptorType) ||
                (stage_flags != bindings_[next_bi_itr->second].stageFlags) ||
                (immut_samp != (bindings_[next_bi_itr->second].pImmutableSamplers ? true : false)) ||
                (flags != binding_flags_[next_bi_itr->second])) {
                return false;
            }
            return true;
        }
    }
    return false;
}
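// Two consecutive bindings are "consistent" when they share descriptorType, stageFlags, immutable-sampler presence, and
// binding flags; this is the property descriptor updates rely on when a write rolls over from one binding into the next.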

// The DescriptorSetLayout stores the per handle data for a descriptor set layout, and references the common definition for the
// handle invariant portion
cvdescriptorset::DescriptorSetLayout::DescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo *p_create_info,
                                                          const VkDescriptorSetLayout layout)
    : BASE_NODE(layout, kVulkanObjectTypeDescriptorSetLayout), layout_id_(GetCanonicalId(p_create_info)) {}

// Validate descriptor set layout create info
bool cvdescriptorset::ValidateDescriptorSetLayoutCreateInfo(
    const ValidationObject *val_obj, const VkDescriptorSetLayoutCreateInfo *create_info, const bool push_descriptor_ext,
    const uint32_t max_push_descriptors, const bool descriptor_indexing_ext,
    const VkPhysicalDeviceVulkan12Features *core12_features,
    const VkPhysicalDeviceInlineUniformBlockFeaturesEXT *inline_uniform_block_features,
    const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *inline_uniform_block_props,
    const VkPhysicalDeviceAccelerationStructureFeaturesKHR *acceleration_structure_features,
    const DeviceExtensions *device_extensions) {
    bool skip = false;
    layer_data::unordered_set<uint32_t> bindings;
    uint64_t total_descriptors = 0;

    const auto *flags_create_info = LvlFindInChain<VkDescriptorSetLayoutBindingFlagsCreateInfo>(create_info->pNext);

    const bool push_descriptor_set = !!(create_info->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
    if (push_descriptor_set && !push_descriptor_ext) {
        skip |= val_obj->LogError(
            val_obj->device, kVUID_Core_DrawState_ExtensionNotEnabled,
            "vkCreateDescriptorSetLayout(): Attempted to use %s in %s but its required extension %s has not been enabled.\n",
            "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR", "VkDescriptorSetLayoutCreateInfo::flags",
            VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    }

    const bool update_after_bind_set = !!(create_info->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT);
    if (update_after_bind_set && !descriptor_indexing_ext) {
        skip |= val_obj->LogError(
            val_obj->device, kVUID_Core_DrawState_ExtensionNotEnabled,
            "vkCreateDescriptorSetLayout(): Attempted to use %s in %s but its required extension %s has not been enabled.\n",
            "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT", "VkDescriptorSetLayoutCreateInfo::flags",
            VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
    }

    auto valid_type = [push_descriptor_set](const VkDescriptorType type) {
        return !push_descriptor_set ||
               ((type != VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) && (type != VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) &&
                (type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT));
    };
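    // In other words, a push descriptor layout may not contain dynamic uniform/storage buffers or inline uniform blocks;
    // every other descriptor type passes this check.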

    uint32_t max_binding = 0;

    uint32_t update_after_bind = create_info->bindingCount;
    uint32_t uniform_buffer_dynamic = create_info->bindingCount;
    uint32_t storage_buffer_dynamic = create_info->bindingCount;
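    // The three indices above track the pBindings element where an update-after-bind flag and dynamic uniform/storage
    // buffer bindings were last seen, using bindingCount as a "not seen" sentinel; later checks in this function
    // presumably consume them to reject invalid combinations.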
| 501 | |
Tobin Ehlis | 154c269 | 2016-10-25 09:36:53 -0600 | [diff] [blame] | 502 | for (uint32_t i = 0; i < create_info->bindingCount; ++i) { |
John Zulauf | 0fdeab3 | 2018-01-23 11:27:35 -0700 | [diff] [blame] | 503 | const auto &binding_info = create_info->pBindings[i]; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 504 | max_binding = std::max(max_binding, binding_info.binding); |
| 505 | |
John Zulauf | 0fdeab3 | 2018-01-23 11:27:35 -0700 | [diff] [blame] | 506 | if (!bindings.insert(binding_info.binding).second) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 507 | skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutCreateInfo-binding-00279", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 508 | "vkCreateDescriptorSetLayout(): pBindings[%u] has duplicated binding number (%u).", i, |
| 509 | binding_info.binding); |
Tobin Ehlis | 154c269 | 2016-10-25 09:36:53 -0600 | [diff] [blame] | 510 | } |
John Zulauf | 0fdeab3 | 2018-01-23 11:27:35 -0700 | [diff] [blame] | 511 | if (!valid_type(binding_info.descriptorType)) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 512 | skip |= val_obj->LogError(val_obj->device, |
| 513 | (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) |
| 514 | ? "VUID-VkDescriptorSetLayoutCreateInfo-flags-02208" |
| 515 | : "VUID-VkDescriptorSetLayoutCreateInfo-flags-00280", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 516 | "vkCreateDescriptorSetLayout(): pBindings[%u] has invalid type %s , for push descriptors.", i, |
| 517 | string_VkDescriptorType(binding_info.descriptorType)); |
John Zulauf | 0fdeab3 | 2018-01-23 11:27:35 -0700 | [diff] [blame] | 518 | } |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 519 | |
| 520 | if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) { |
sfricke-samsung | c1e27c3 | 2021-01-16 09:32:49 -0800 | [diff] [blame] | 521 | if (!inline_uniform_block_features->inlineUniformBlock) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 522 | skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBinding-descriptorType-04604", |
| 523 | "vkCreateDescriptorSetLayout(): pBindings[%u] is creating VkDescriptorSetLayout with " |
| 524 | "descriptor type VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT " |
Jeremy Gebben | da6b48f | 2021-05-13 10:46:18 -0600 | [diff] [blame] | 525 | "but the inlineUniformBlock feature is not enabled", |
| 526 | i); |
Tony-LunarG | d6744bc | 2019-08-23 09:57:10 -0600 | [diff] [blame] | 527 | } else { |
| 528 | if ((binding_info.descriptorCount % 4) != 0) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 529 | skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBinding-descriptorType-02209", |
| 530 | "vkCreateDescriptorSetLayout(): pBindings[%u] has descriptorCount =(%" PRIu32 |
| 531 | ") but must be a multiple of 4", |
| 532 | i, binding_info.descriptorCount); |
Tony-LunarG | d6744bc | 2019-08-23 09:57:10 -0600 | [diff] [blame] | 533 | } |
| 534 | if (binding_info.descriptorCount > inline_uniform_block_props->maxInlineUniformBlockSize) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 535 | skip |= |
| 536 | val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBinding-descriptorType-02210", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 537 | "vkCreateDescriptorSetLayout(): pBindings[%u] has descriptorCount =(%" PRIu32 |
| 538 | ") but must be less than or equal to maxInlineUniformBlockSize (%u)", |
| 539 | i, binding_info.descriptorCount, inline_uniform_block_props->maxInlineUniformBlockSize); |
Tony-LunarG | d6744bc | 2019-08-23 09:57:10 -0600 | [diff] [blame] | 540 | } |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 541 | } |
ziga-lunarg | 185ef28 | 2021-07-19 13:13:28 +0200 | [diff] [blame] | 542 | } else if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) { |
| 543 | uniform_buffer_dynamic = i; |
| 544 | } else if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) { |
| 545 | storage_buffer_dynamic = i; |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 546 | } |
| 547 | |
Tony-LunarG | 7337b31 | 2020-04-15 16:40:25 -0600 | [diff] [blame] | 548 | if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || |
| 549 | binding_info.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) && |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 550 | binding_info.pImmutableSamplers && IsExtEnabled(device_extensions->vk_ext_custom_border_color)) { |
Tony-LunarG | 7337b31 | 2020-04-15 16:40:25 -0600 | [diff] [blame] | 551 | const CoreChecks *core_checks = reinterpret_cast<const CoreChecks *>(val_obj); |
| 552 | for (uint32_t j = 0; j < binding_info.descriptorCount; j++) { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 553 | const auto sampler_state = core_checks->Get<SAMPLER_STATE>(binding_info.pImmutableSamplers[j]); |
Tony-LunarG | 7337b31 | 2020-04-15 16:40:25 -0600 | [diff] [blame] | 554 | if (sampler_state && (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT || |
| 555 | sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT)) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 556 | skip |= val_obj->LogError( |
| 557 | val_obj->device, "VUID-VkDescriptorSetLayoutBinding-pImmutableSamplers-04009", |
Jeremy Gebben | da6b48f | 2021-05-13 10:46:18 -0600 | [diff] [blame] | 558 | "vkCreateDescriptorSetLayout(): pBindings[%u].pImmutableSamplers[%u] has VkSampler %s" |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 559 | " presented as immutable has a custom border color", |
Jeremy Gebben | da6b48f | 2021-05-13 10:46:18 -0600 | [diff] [blame] | 560 | i, j, val_obj->report_data->FormatHandle(binding_info.pImmutableSamplers[j]).c_str()); |
Tony-LunarG | 7337b31 | 2020-04-15 16:40:25 -0600 | [diff] [blame] | 561 | } |
| 562 | } |
| 563 | } |
| 564 | |
ziga-lunarg | 6d77079 | 2021-07-19 11:27:18 +0200 | [diff] [blame] | 565 | if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE && binding_info.pImmutableSamplers != nullptr) { |
| 566 | skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBinding-descriptorType-04605", |
| 567 | "vkCreateDescriptorSetLayout(): pBindings[%u] has descriptorType " |
| 568 | "VK_DESCRIPTOR_TYPE_MUTABLE_VALVE but pImmutableSamplers is not NULL.", |
| 569 | i); |
| 570 | } |
| 571 | |
John Zulauf | 0fdeab3 | 2018-01-23 11:27:35 -0700 | [diff] [blame] | 572 | total_descriptors += binding_info.descriptorCount; |
Tobin Ehlis | 154c269 | 2016-10-25 09:36:53 -0600 | [diff] [blame] | 573 | } |
John Zulauf | 0fdeab3 | 2018-01-23 11:27:35 -0700 | [diff] [blame] | 574 | |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 575 | if (flags_create_info) { |
| 576 | if (flags_create_info->bindingCount != 0 && flags_create_info->bindingCount != create_info->bindingCount) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 577 | skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-bindingCount-03002", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 578 | "vkCreateDescriptorSetLayout(): VkDescriptorSetLayoutCreateInfo::bindingCount (%d) != " |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 579 | "VkDescriptorSetLayoutBindingFlagsCreateInfo::bindingCount (%d)", |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 580 | create_info->bindingCount, flags_create_info->bindingCount); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 581 | } |
| 582 | |
| 583 | if (flags_create_info->bindingCount == create_info->bindingCount) { |
| 584 | for (uint32_t i = 0; i < create_info->bindingCount; ++i) { |
| 585 | const auto &binding_info = create_info->pBindings[i]; |
| 586 | |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 587 | if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT) { |
ziga-lunarg | 185ef28 | 2021-07-19 13:13:28 +0200 | [diff] [blame] | 588 | update_after_bind = i; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 589 | if (!update_after_bind_set) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 590 | skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutCreateInfo-flags-03000", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 591 | "vkCreateDescriptorSetLayout(): pBindings[%u] does not have " |
| 592 | "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT.", |
| 593 | i); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 594 | } |
| 595 | |
| 596 | if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER && |
Piers Daniell | 41b8c5d | 2020-01-10 15:42:00 -0700 | [diff] [blame] | 597 | !core12_features->descriptorBindingUniformBufferUpdateAfterBind) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 598 | skip |= val_obj->LogError( |
| 599 | val_obj->device, |
| 600 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-" |
| 601 | "descriptorBindingUniformBufferUpdateAfterBind-03005", |
| 602 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT " |
| 603 | "for %s since descriptorBindingUniformBufferUpdateAfterBind is not enabled.", |
| 604 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 605 | } |
| 606 | if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || |
| 607 | binding_info.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER || |
| 608 | binding_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) && |
Piers Daniell | 41b8c5d | 2020-01-10 15:42:00 -0700 | [diff] [blame] | 609 | !core12_features->descriptorBindingSampledImageUpdateAfterBind) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 610 | skip |= val_obj->LogError( |
| 611 | val_obj->device, |
| 612 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-" |
| 613 | "descriptorBindingSampledImageUpdateAfterBind-03006", |
| 614 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT " |
| 615 | "for %s since descriptorBindingSampledImageUpdateAfterBind is not enabled.", |
| 616 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 617 | } |
| 618 | if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE && |
Piers Daniell | 41b8c5d | 2020-01-10 15:42:00 -0700 | [diff] [blame] | 619 | !core12_features->descriptorBindingStorageImageUpdateAfterBind) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 620 | skip |= val_obj->LogError( |
| 621 | val_obj->device, |
| 622 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-" |
| 623 | "descriptorBindingStorageImageUpdateAfterBind-03007", |
| 624 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT " |
| 625 | "for %s since descriptorBindingStorageImageUpdateAfterBind is not enabled.", |
| 626 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 627 | } |
| 628 | if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER && |
Piers Daniell | 41b8c5d | 2020-01-10 15:42:00 -0700 | [diff] [blame] | 629 | !core12_features->descriptorBindingStorageBufferUpdateAfterBind) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 630 | skip |= val_obj->LogError( |
| 631 | val_obj->device, |
| 632 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-" |
| 633 | "descriptorBindingStorageBufferUpdateAfterBind-03008", |
| 634 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT " |
| 635 | "for %s since descriptorBindingStorageBufferUpdateAfterBind is not enabled.", |
| 636 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 637 | } |
| 638 | if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER && |
Piers Daniell | 41b8c5d | 2020-01-10 15:42:00 -0700 | [diff] [blame] | 639 | !core12_features->descriptorBindingUniformTexelBufferUpdateAfterBind) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 640 | skip |= val_obj->LogError( |
| 641 | val_obj->device, |
| 642 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-" |
| 643 | "descriptorBindingUniformTexelBufferUpdateAfterBind-03009", |
| 644 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT " |
| 645 | "for %s since descriptorBindingUniformTexelBufferUpdateAfterBind is not enabled.", |
| 646 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 647 | } |
| 648 | if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER && |
Piers Daniell | 41b8c5d | 2020-01-10 15:42:00 -0700 | [diff] [blame] | 649 | !core12_features->descriptorBindingStorageTexelBufferUpdateAfterBind) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 650 | skip |= val_obj->LogError( |
| 651 | val_obj->device, |
| 652 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-" |
| 653 | "descriptorBindingStorageTexelBufferUpdateAfterBind-03010", |
| 654 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT " |
| 655 | "for %s since descriptorBindingStorageTexelBufferUpdateAfterBind is not enabled.", |
| 656 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 657 | } |
| 658 | if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT || |
| 659 | binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC || |
| 660 | binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 661 | skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-None-03011", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 662 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have " |
| 663 | "VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT for %s.", |
| 664 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 665 | } |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 666 | |
| 667 | if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT && |
| 668 | !inline_uniform_block_features->descriptorBindingInlineUniformBlockUpdateAfterBind) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 669 | skip |= val_obj->LogError( |
| 670 | val_obj->device, |
| 671 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-" |
| 672 | "descriptorBindingInlineUniformBlockUpdateAfterBind-02211", |
| 673 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT " |
| 674 | "for %s since descriptorBindingInlineUniformBlockUpdateAfterBind is not enabled.", |
| 675 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 676 | } |
ziga-lunarg | 637356a | 2021-07-19 12:36:34 +0200 | [diff] [blame] | 677 | if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR || |
| 678 | binding_info.descriptorType == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV) && |
| 679 | !acceleration_structure_features->descriptorBindingAccelerationStructureUpdateAfterBind) { |
| 680 | skip |= val_obj->LogError(val_obj->device, |
| 681 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-" |
| 682 | "descriptorBindingAccelerationStructureUpdateAfterBind-03570", |
| 683 | "vkCreateDescriptorSetLayout(): pBindings[%" PRIu32 |
| 684 | "] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT " |
| 685 | "for %s if " |
| 686 | "VkPhysicalDeviceAccelerationStructureFeaturesKHR::" |
| 687 | "descriptorBindingAccelerationStructureUpdateAfterBind is not enabled.", |
| 688 | i, string_VkDescriptorType(binding_info.descriptorType)); |
| 689 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 690 | } |
| 691 | |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 692 | if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT) { |
Piers Daniell | 41b8c5d | 2020-01-10 15:42:00 -0700 | [diff] [blame] | 693 | if (!core12_features->descriptorBindingUpdateUnusedWhilePending) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 694 | skip |= val_obj->LogError( |
| 695 | val_obj->device, |
Mike Schuchardt | 65847d9 | 2019-12-20 13:50:47 -0800 | [diff] [blame] | 696 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-descriptorBindingUpdateUnusedWhilePending-03012", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 697 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have " |
| 698 | "VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT for %s since " |
| 699 | "descriptorBindingUpdateUnusedWhilePending is not enabled.", |
| 700 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 701 | } |
| 702 | } |
| 703 | |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 704 | if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT) { |
Piers Daniell | 41b8c5d | 2020-01-10 15:42:00 -0700 | [diff] [blame] | 705 | if (!core12_features->descriptorBindingPartiallyBound) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 706 | skip |= val_obj->LogError( |
| 707 | val_obj->device, |
| 708 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-descriptorBindingPartiallyBound-03013", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 709 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT for " |
| 710 | "%s since descriptorBindingPartiallyBound is not enabled.", |
| 711 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 712 | } |
| 713 | } |
| 714 | |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 715 | if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT) { |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 716 | if (binding_info.binding != max_binding) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 717 | skip |= val_obj->LogError( |
| 718 | val_obj->device, "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-pBindingFlags-03004", |
| 719 | "vkCreateDescriptorSetLayout(): pBindings[%u] has VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT " |
| 720 | "but %u is the largest value of all the bindings.", |
| 721 | i, binding_info.binding); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 722 | } |
| 723 | |
Piers Daniell | 41b8c5d | 2020-01-10 15:42:00 -0700 | [diff] [blame] | 724 | if (!core12_features->descriptorBindingVariableDescriptorCount) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 725 | skip |= val_obj->LogError( |
| 726 | val_obj->device, |
Mike Schuchardt | 65847d9 | 2019-12-20 13:50:47 -0800 | [diff] [blame] | 727 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-descriptorBindingVariableDescriptorCount-03014", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 728 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have " |
| 729 | "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT for %s since " |
| 730 | "descriptorBindingVariableDescriptorCount is not enabled.", |
| 731 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 732 | } |
sfricke-samsung | 4ba7d6e | 2021-03-06 20:56:35 -0800 | [diff] [blame] | 733 | if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) || |
| 734 | (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 735 | skip |= val_obj->LogError(val_obj->device, |
| 736 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-pBindingFlags-03015", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 737 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have " |
| 738 | "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT for %s.", |
| 739 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 740 | } |
| 741 | } |
| 742 | |
| 743 | if (push_descriptor_set && |
| 744 | (flags_create_info->pBindingFlags[i] & |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 745 | (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | |
| 746 | VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT))) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 747 | skip |= val_obj->LogError( |
| 748 | val_obj->device, "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-flags-03003", |
| 749 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, " |
| 750 | "VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, or " |
| 751 | "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT for with " |
| 752 | "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR.", |
| 753 | i); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 754 | } |
| 755 | } |
| 756 | } |
| 757 | } |
| 758 | |
ziga-lunarg | 185ef28 | 2021-07-19 13:13:28 +0200 | [diff] [blame] | 759 | if (update_after_bind < create_info->bindingCount) { |
| 760 | if (uniform_buffer_dynamic < create_info->bindingCount) { |
| 761 | skip |= |
| 762 | val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutCreateInfo-descriptorType-03001", |
| 763 | "vkCreateDescriptorSetLayout(): binding (%" PRIi32 |
| 764 | ") has VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT " |
| 765 | "flag, but binding (%" PRIi32 ") has descriptor type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC.", |
| 766 | update_after_bind, uniform_buffer_dynamic); |
| 767 | } |
| 768 | if (storage_buffer_dynamic < create_info->bindingCount) { |
| 769 | skip |= |
| 770 | val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutCreateInfo-descriptorType-03001", |
| 771 | "vkCreateDescriptorSetLayout(): binding (%" PRIi32 |
| 772 | ") has VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT " |
| 773 | "flag, but binding (%" PRIi32 ") has descriptor type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC.", |
| 774 | update_after_bind, storage_buffer_dynamic); |
| 775 | } |
| 776 | } |
| 777 | |
John Zulauf | 0fdeab3 | 2018-01-23 11:27:35 -0700 | [diff] [blame] | 778 | if ((push_descriptor_set) && (total_descriptors > max_push_descriptors)) { |
| 779 | const char *undefined = push_descriptor_ext ? "" : " -- undefined"; |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 780 | skip |= val_obj->LogError( |
| 781 | val_obj->device, "VUID-VkDescriptorSetLayoutCreateInfo-flags-00281", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 782 | "vkCreateDescriptorSetLayout(): for push descriptor, total descriptor count in layout (%" PRIu64 |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 783 | ") must not be greater than VkPhysicalDevicePushDescriptorPropertiesKHR::maxPushDescriptors (%" PRIu32 "%s).", |
| 784 | total_descriptors, max_push_descriptors, undefined); |
John Zulauf | 0fdeab3 | 2018-01-23 11:27:35 -0700 | [diff] [blame] | 785 | } |
| 786 | |
Tobin Ehlis | 154c269 | 2016-10-25 09:36:53 -0600 | [diff] [blame] | 787 | return skip; |
| 788 | } |
| 789 | |
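// Illustrative sketch (not part of this file): an application-side layout creation that satisfies the
// binding-flag rules validated above -- VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT paired with the
// VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT layout flag, and
// VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT applied only to the binding with the largest
// binding number. Assumes the corresponding Vulkan 1.2 descriptor-indexing features are enabled;
// 'device' and 'layout' are hypothetical handles.
//
//     const VkDescriptorSetLayoutBinding bindings[2] = {
//         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
//         {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1024, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
//     };
//     const VkDescriptorBindingFlags binding_flags[2] = {
//         0,
//         VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT | VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT |
//             VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT,
//     };
//     VkDescriptorSetLayoutBindingFlagsCreateInfo flags_info = {
//         VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, nullptr, 2, binding_flags};
//     VkDescriptorSetLayoutCreateInfo layout_ci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, &flags_info,
//                                                  VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, 2, bindings};
//     VkDescriptorSetLayout layout = VK_NULL_HANDLE;
//     vkCreateDescriptorSetLayout(device, &layout_ci, nullptr, &layout);
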
Mark Lobodzinski | e12b6e3 | 2020-06-29 11:44:15 -0600 | [diff] [blame] | 790 | void cvdescriptorset::AllocateDescriptorSetsData::Init(uint32_t count) { |
| 791 | layout_nodes.resize(count); |
Mark Lobodzinski | e12b6e3 | 2020-06-29 11:44:15 -0600 | [diff] [blame] | 792 | } |
Tobin Ehlis | 68d0adf | 2016-06-01 11:33:50 -0600 | [diff] [blame] | 793 | |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 794 | cvdescriptorset::DescriptorSet::DescriptorSet(const VkDescriptorSet set, DESCRIPTOR_POOL_STATE *pool_state, |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 795 | const std::shared_ptr<DescriptorSetLayout const> &layout, uint32_t variable_count, |
John Zulauf | d2c3dae | 2019-12-12 11:02:17 -0700 | [diff] [blame] | 796 | const cvdescriptorset::DescriptorSet::StateTracker *state_data) |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 797 | : BASE_NODE(set, kVulkanObjectTypeDescriptorSet), |
| 798 | some_update_(false), |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 799 | pool_state_(pool_state), |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 800 | layout_(layout), |
John Zulauf | d2c3dae | 2019-12-12 11:02:17 -0700 | [diff] [blame] | 801 | state_data_(state_data), |
Jeff Bolz | dd4cfa1 | 2019-08-11 20:57:51 -0500 | [diff] [blame] | 802 | variable_count_(variable_count), |
| 803 | change_count_(0) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 804 | if (pool_state_) { |
| 805 | pool_state_->AddParent(this); |
| 806 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 807 | // Foreach binding, create default descriptors of given type |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 808 | descriptors_.reserve(layout_->GetTotalDescriptorCount()); |
| 809 | descriptor_store_.resize(layout_->GetTotalDescriptorCount()); |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 810 | auto free_descriptor = descriptor_store_.data(); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 811 | for (uint32_t i = 0; i < layout_->GetBindingCount(); ++i) { |
| 812 | auto type = layout_->GetTypeFromIndex(i); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 813 | switch (type) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 814 | case VK_DESCRIPTOR_TYPE_SAMPLER: { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 815 | auto immut_sampler = layout_->GetImmutableSamplerPtrFromIndex(i); |
| 816 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
Tobin Ehlis | 082c751 | 2017-05-08 11:24:57 -0600 | [diff] [blame] | 817 | if (immut_sampler) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 818 | descriptors_.emplace_back(new ((free_descriptor++)->Sampler()) |
| 819 | SamplerDescriptor(state_data, immut_sampler + di)); |
Tobin Ehlis | 082c751 | 2017-05-08 11:24:57 -0600 | [diff] [blame] | 820 | some_update_ = true; // Immutable samplers are updated at creation |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 821 | } else { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 822 | descriptors_.emplace_back(new ((free_descriptor++)->Sampler()) SamplerDescriptor(state_data, nullptr)); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 823 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 824 | descriptors_.back()->AddParent(this); |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 825 | } |
| 826 | break; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 827 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 828 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 829 | auto immut = layout_->GetImmutableSamplerPtrFromIndex(i); |
| 830 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
Tobin Ehlis | 082c751 | 2017-05-08 11:24:57 -0600 | [diff] [blame] | 831 | if (immut) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 832 | descriptors_.emplace_back(new ((free_descriptor++)->ImageSampler()) |
| 833 | ImageSamplerDescriptor(state_data, immut + di)); |
Tobin Ehlis | 082c751 | 2017-05-08 11:24:57 -0600 | [diff] [blame] | 834 | some_update_ = true; // Immutable samplers are updated at creation |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 835 | } else { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 836 | descriptors_.emplace_back(new ((free_descriptor++)->ImageSampler()) |
| 837 | ImageSamplerDescriptor(state_data, nullptr)); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 838 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 839 | descriptors_.back()->AddParent(this); |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 840 | } |
| 841 | break; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 842 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 843 | // ImageDescriptors |
| 844 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
| 845 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: |
| 846 | case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 847 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 848 | descriptors_.emplace_back(new ((free_descriptor++)->Image()) ImageDescriptor(type)); |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 849 | descriptors_.back()->AddParent(this); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 850 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 851 | break; |
| 852 | case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: |
| 853 | case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 854 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 855 | descriptors_.emplace_back(new ((free_descriptor++)->Texel()) TexelDescriptor(type)); |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 856 | descriptors_.back()->AddParent(this); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 857 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 858 | break; |
sfricke-samsung | 4ca3565 | 2021-03-05 02:22:10 -0800 | [diff] [blame] | 859 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: |
| 860 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: |
| 861 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 862 | descriptors_.emplace_back(new ((free_descriptor++)->Buffer()) BufferDescriptor(type)); |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 863 | descriptors_.back()->AddParent(this); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 864 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 865 | break; |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 866 | case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 867 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 868 | descriptors_.emplace_back(new ((free_descriptor++)->InlineUniform()) InlineUniformDescriptor(type)); |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 869 | descriptors_.back()->AddParent(this); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 870 | } |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 871 | break; |
Eric Werness | 30127fd | 2018-10-31 21:01:03 -0700 | [diff] [blame] | 872 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV: |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 873 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 874 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 875 | descriptors_.emplace_back(new ((free_descriptor++)->AccelerationStructure()) |
| 876 | AccelerationStructureDescriptor(type)); |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 877 | descriptors_.back()->AddParent(this); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 878 | } |
Jeff Bolz | fbe5158 | 2018-09-13 10:01:35 -0500 | [diff] [blame] | 879 | break; |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 880 | case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE: |
| 881 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
ziga-lunarg | e5d2854 | 2021-10-24 21:14:25 +0200 | [diff] [blame] | 882 | descriptors_.emplace_back(new ((free_descriptor++)->Mutable()) MutableDescriptor()); |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 883 | descriptors_.back()->AddParent(this); |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 884 | } |
| 885 | break; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 886 | default: |
sfricke-samsung | 60f29ec | 2021-03-10 20:37:25 -0800 | [diff] [blame] | 887 | if (IsDynamicDescriptor(type) && IsBufferDescriptor(type)) { |
| 888 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
| 889 | dynamic_offset_idx_to_descriptor_list_.push_back(descriptors_.size()); |
| 890 | descriptors_.emplace_back(new ((free_descriptor++)->Buffer()) BufferDescriptor(type)); |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 891 | descriptors_.back()->AddParent(this); |
sfricke-samsung | 60f29ec | 2021-03-10 20:37:25 -0800 | [diff] [blame] | 892 | } |
| 893 | } else { |
| 894 | assert(0); // Bad descriptor type specified |
| 895 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 896 | break; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 897 | } |
| 898 | } |
| 899 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 900 | |
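// Illustrative sketch (not part of this file): the variable_count stored by the DescriptorSet
// constructor above originates from a VkDescriptorSetVariableDescriptorCountAllocateInfo chained to
// vkAllocateDescriptorSets() when the layout's highest binding uses
// VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT; 'device', 'pool', and 'layout' are hypothetical handles.
//
//     const uint32_t variable_counts[] = {64};  // actual descriptor count for the variable-sized binding
//     VkDescriptorSetVariableDescriptorCountAllocateInfo variable_info = {
//         VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, nullptr, 1, variable_counts};
//     VkDescriptorSetAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, &variable_info,
//                                               pool, 1, &layout};
//     VkDescriptorSet descriptor_set = VK_NULL_HANDLE;
//     vkAllocateDescriptorSets(device, &alloc_info, &descriptor_set);
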
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 901 | void cvdescriptorset::DescriptorSet::Destroy() { |
| 902 | if (pool_state_) { |
| 903 | pool_state_->RemoveParent(this); |
| 904 | } |
| 905 | for (auto &desc: descriptors_) { |
| 906 | desc->RemoveParent(this); |
| 907 | } |
| 908 | BASE_NODE::Destroy(); |
| 909 | } |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 910 | |
Jeremy Gebben | 90ce416 | 2021-08-25 14:23:07 -0600 | [diff] [blame] | 911 | static std::string StringDescriptorReqViewType(DescriptorReqFlags req) { |
Chris Forbes | 6e58ebd | 2016-08-31 12:58:14 -0700 | [diff] [blame] | 912 | std::string result(""); |
Mark Lobodzinski | 29f451a | 2020-02-10 16:15:30 -0700 | [diff] [blame] | 913 | for (unsigned i = 0; i <= VK_IMAGE_VIEW_TYPE_CUBE_ARRAY; i++) { |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 914 | if (req & (1 << i)) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 915 | if (result.size()) result += ", "; |
Chris Forbes | 6e58ebd | 2016-08-31 12:58:14 -0700 | [diff] [blame] | 916 | result += string_VkImageViewType(VkImageViewType(i)); |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 917 | } |
| 918 | } |
| 919 | |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 920 | if (!result.size()) result = "(none)"; |
Chris Forbes | 6e58ebd | 2016-08-31 12:58:14 -0700 | [diff] [blame] | 921 | |
| 922 | return result; |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 923 | } |
| 924 | |
Jeremy Gebben | 90ce416 | 2021-08-25 14:23:07 -0600 | [diff] [blame] | 925 | static char const *StringDescriptorReqComponentType(DescriptorReqFlags req) { |
Chris Forbes | da01e8d | 2018-08-27 15:36:57 -0700 | [diff] [blame] | 926 | if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_SINT) return "SINT"; |
| 927 | if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_UINT) return "UINT"; |
| 928 | if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT) return "FLOAT"; |
| 929 | return "(none)"; |
| 930 | } |
| 931 | |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 932 | unsigned DescriptorRequirementsBitsFromFormat(VkFormat fmt) { |
sfricke-samsung | e308629 | 2021-11-18 23:02:35 -0800 | [diff] [blame] | 933 | if (FormatIsSINT(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_SINT; |
| 934 | if (FormatIsUINT(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_UINT; |
sfricke-samsung | ed028b0 | 2021-09-06 23:14:51 -0700 | [diff] [blame] | 935 | // Formats such as VK_FORMAT_D16_UNORM_S8_UINT are both depth and stencil, so they satisfy both FLOAT (depth) and UINT (stencil)
Chris Forbes | da01e8d | 2018-08-27 15:36:57 -0700 | [diff] [blame] | 936 | if (FormatIsDepthAndStencil(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT | DESCRIPTOR_REQ_COMPONENT_TYPE_UINT; |
| 937 | if (fmt == VK_FORMAT_UNDEFINED) return 0; |
| 938 | // everything else -- UNORM/SNORM/FLOAT/USCALED/SSCALED is all float in the shader. |
| 939 | return DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT; |
| 940 | } |
| 941 | |
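// A minimal illustration (not part of this file) of the component-type mapping implemented above:
//
//     DescriptorRequirementsBitsFromFormat(VK_FORMAT_R32G32B32A32_SINT);  // DESCRIPTOR_REQ_COMPONENT_TYPE_SINT
//     DescriptorRequirementsBitsFromFormat(VK_FORMAT_R8G8B8A8_UINT);      // DESCRIPTOR_REQ_COMPONENT_TYPE_UINT
//     DescriptorRequirementsBitsFromFormat(VK_FORMAT_D24_UNORM_S8_UINT);  // FLOAT | UINT (depth samples as float, stencil as uint)
//     DescriptorRequirementsBitsFromFormat(VK_FORMAT_R8G8B8A8_UNORM);     // DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT
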
Tobin Ehlis | 3066db6 | 2016-08-22 08:12:23 -0600 | [diff] [blame] | 942 | // Validate that the state of this set is appropriate for the given bindings and dynamic_offsets at Draw time |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 943 | // This includes validating that all descriptors in the given bindings are updated, |
| 944 | // that any update buffers are valid, and that any dynamic offsets are within the bounds of their buffers. |
| 945 | // Return true if state is acceptable, or false and write an error message into error string |
locke-lunarg | b8d7a7a | 2020-10-25 16:01:52 -0600 | [diff] [blame] | 946 | bool CoreChecks::ValidateDrawState(const DescriptorSet *descriptor_set, const BindingReqMap &bindings, |
| 947 | const std::vector<uint32_t> &dynamic_offsets, const CMD_BUFFER_STATE *cb_node, |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 948 | const std::vector<IMAGE_VIEW_STATE *> *attachments, const std::vector<SUBPASS_INFO> *subpasses, |
locke-lunarg | fc78e93 | 2020-11-19 17:06:24 -0700 | [diff] [blame] | 949 | const char *caller, const DrawDispatchVuid &vuids) const { |
Jeremy Gebben | 37ed31e | 2021-04-23 12:12:10 -0600 | [diff] [blame] | 950 | layer_data::optional<layer_data::unordered_map<VkImageView, VkImageLayout>> checked_layouts; |
Karl Schultz | 2171f93 | 2021-03-19 10:47:01 -0600 | [diff] [blame] | 951 | if (descriptor_set->GetTotalDescriptorCount() > cvdescriptorset::PrefilterBindRequestMap::kManyDescriptors_) { |
| 952 | checked_layouts.emplace(); |
| 953 | } |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 954 | bool result = false; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 955 | VkFramebuffer framebuffer = cb_node->activeFramebuffer ? cb_node->activeFramebuffer->framebuffer() : VK_NULL_HANDLE; |
John Zulauf | 79f0658 | 2021-02-27 18:38:39 -0700 | [diff] [blame] | 956 | for (const auto &binding_pair : bindings) { |
| 957 | const auto binding = binding_pair.first; |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 958 | DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(), binding); |
| 959 | if (binding_it.AtEnd()) { // End at construction is the condition for an invalid binding. |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 960 | auto set = descriptor_set->GetSet(); |
locke-lunarg | 1328e8e | 2020-08-20 12:40:08 -0600 | [diff] [blame] | 961 | result |= LogError(set, vuids.descriptor_valid, |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 962 | "%s encountered the following validation error at %s time: Attempting to " |
| 963 | "validate DrawState for binding #%u which is an invalid binding for this descriptor set.", |
| 964 | report_data->FormatHandle(set).c_str(), caller, binding); |
| 965 | return result; |
Tobin Ehlis | 58c5958 | 2016-06-21 12:34:33 -0600 | [diff] [blame] | 966 | } |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 967 | |
| 968 | if (binding_it.GetDescriptorBindingFlags() & |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 969 | (VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT)) { |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 970 | // Can't validate the descriptor because it may not have been updated, |
| 971 | // or the view could have been destroyed |
| 972 | continue; |
| 973 | } |
John Zulauf | 81dd1f1 | 2021-01-26 16:49:16 -0700 | [diff] [blame] | 974 | // This is a record-time-only path
| 975 | const bool record_time_validate = true; |
locke-lunarg | fc78e93 | 2020-11-19 17:06:24 -0700 | [diff] [blame] | 976 | result |= ValidateDescriptorSetBindingData(cb_node, descriptor_set, dynamic_offsets, binding_pair, framebuffer, attachments, |
Karl Schultz | 2171f93 | 2021-03-19 10:47:01 -0600 | [diff] [blame] | 977 | subpasses, record_time_validate, caller, vuids, checked_layouts); |
unknown | 3087a64 | 2019-09-26 17:21:05 -0600 | [diff] [blame] | 978 | } |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 979 | return result; |
unknown | 3087a64 | 2019-09-26 17:21:05 -0600 | [diff] [blame] | 980 | } |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 981 | |
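// Illustrative, application-side sketch (not part of this file, hypothetical handles): the
// dynamic_offsets checked by ValidateDrawState() are the values the application supplied to
// vkCmdBindDescriptorSets() for its *_DYNAMIC buffer bindings; each offset plus the descriptor's
// range must stay within the bound buffer.
//
//     const uint32_t dynamic_offsets[] = {256};  // one VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC binding
//     vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout,
//                             0 /*firstSet*/, 1, &descriptor_set, 1, dynamic_offsets);
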
locke-lunarg | b8be822 | 2020-10-20 00:34:37 -0600 | [diff] [blame] | 982 | bool CoreChecks::ValidateDescriptorSetBindingData(const CMD_BUFFER_STATE *cb_node, const DescriptorSet *descriptor_set, |
| 983 | const std::vector<uint32_t> &dynamic_offsets, |
John Zulauf | 79f0658 | 2021-02-27 18:38:39 -0700 | [diff] [blame] | 984 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, |
Mark Lobodzinski | 85ebd40 | 2020-12-03 12:56:07 -0700 | [diff] [blame] | 985 | VkFramebuffer framebuffer, const std::vector<IMAGE_VIEW_STATE *> *attachments, |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 986 | const std::vector<SUBPASS_INFO> *subpasses, bool record_time_validate, |
Karl Schultz | 2171f93 | 2021-03-19 10:47:01 -0600 | [diff] [blame] | 987 | const char *caller, const DrawDispatchVuid &vuids, |
Jeremy Gebben | 37ed31e | 2021-04-23 12:12:10 -0600 | [diff] [blame] | 988 | layer_data::optional<layer_data::unordered_map<VkImageView, VkImageLayout>> &checked_layouts) const { |
unknown | 3087a64 | 2019-09-26 17:21:05 -0600 | [diff] [blame] | 989 | using DescriptorClass = cvdescriptorset::DescriptorClass; |
| 990 | using BufferDescriptor = cvdescriptorset::BufferDescriptor; |
| 991 | using ImageDescriptor = cvdescriptorset::ImageDescriptor; |
| 992 | using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor; |
| 993 | using SamplerDescriptor = cvdescriptorset::SamplerDescriptor; |
| 994 | using TexelDescriptor = cvdescriptorset::TexelDescriptor; |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 995 | using AccelerationStructureDescriptor = cvdescriptorset::AccelerationStructureDescriptor; |
locke-lunarg | 3604599 | 2020-08-20 16:54:37 -0600 | [diff] [blame] | 996 | const auto binding = binding_info.first; |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 997 | bool skip = false; |
unknown | 3087a64 | 2019-09-26 17:21:05 -0600 | [diff] [blame] | 998 | DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(), binding); |
| 999 | { |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 1000 | // Copy the range, the end range is subject to update based on variable length descriptor arrays. |
| 1001 | cvdescriptorset::IndexRange index_range = binding_it.GetGlobalIndexRange(); |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 1002 | auto array_idx = 0; // Track array idx if we're dealing with array descriptors |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1003 | |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 1004 | if (binding_it.IsVariableDescriptorCount()) { |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1005 | // Only validate the first N descriptors if it uses variable_count |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 1006 | index_range.end = index_range.start + descriptor_set->GetVariableDescriptorCount(); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1007 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1008 | for (uint32_t i = index_range.start; !skip && i < index_range.end; ++i, ++array_idx) { |
Locke | b994adf | 2019-03-29 23:52:31 -0600 | [diff] [blame] | 1009 | uint32_t index = i - index_range.start; |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 1010 | const auto *descriptor = descriptor_set->GetDescriptorFromGlobalIndex(i); |
Jeremy Gebben | 550ebbd | 2021-03-11 05:04:52 -0700 | [diff] [blame] | 1011 | const auto descriptor_class = descriptor->GetClass(); |
Locke | b994adf | 2019-03-29 23:52:31 -0600 | [diff] [blame] | 1012 | |
Jeremy Gebben | 550ebbd | 2021-03-11 05:04:52 -0700 | [diff] [blame] | 1013 | if (descriptor_class == DescriptorClass::InlineUniform) { |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 1014 | // Can't validate the descriptor because it may not have been updated. |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1015 | continue; |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 1016 | } else if (!descriptor->updated) { |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 1017 | auto set = descriptor_set->GetSet(); |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 1018 | return LogError( |
| 1019 | set, vuids.descriptor_valid, |
| 1020 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in binding #%" PRIu32 |
| 1021 | " index %" PRIu32 |
| 1022 | " is being used in draw but has never been updated via vkUpdateDescriptorSets() or a similar call.", |
| 1023 | report_data->FormatHandle(set).c_str(), caller, binding, index); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1024 | } |
| 1025 | switch (descriptor_class) { |
| 1026 | case DescriptorClass::GeneralBuffer: { |
| 1027 | const auto *buffer_desc = static_cast<const BufferDescriptor *>(descriptor); |
| 1028 | skip = |
| 1029 | ValidateGeneralBufferDescriptor(caller, vuids, cb_node, descriptor_set, *buffer_desc, binding_info, index); |
| 1030 | } break; |
| 1031 | case DescriptorClass::ImageSampler: { |
| 1032 | const auto *image_sampler_desc = static_cast<const ImageSamplerDescriptor *>(descriptor); |
| 1033 | skip = ValidateImageDescriptor(caller, vuids, cb_node, descriptor_set, *image_sampler_desc, binding_info, index, |
| 1034 | record_time_validate, attachments, subpasses, framebuffer, binding_it.GetType(), |
| 1035 | checked_layouts); |
| 1036 | if (!skip) { |
| 1037 | skip = ValidateSamplerDescriptor(caller, vuids, cb_node, descriptor_set, binding_info, index, |
| 1038 | image_sampler_desc->GetSampler(), image_sampler_desc->IsImmutableSampler(), |
| 1039 | image_sampler_desc->GetSamplerState()); |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 1040 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1041 | } break; |
| 1042 | case DescriptorClass::Image: { |
| 1043 | const auto *image_desc = static_cast<const ImageDescriptor *>(descriptor); |
| 1044 | skip = ValidateImageDescriptor(caller, vuids, cb_node, descriptor_set, *image_desc, binding_info, index, |
| 1045 | record_time_validate, attachments, subpasses, framebuffer, binding_it.GetType(), |
| 1046 | checked_layouts); |
| 1047 | } break; |
| 1048 | case DescriptorClass::PlainSampler: { |
| 1049 | const auto *sampler_desc = static_cast<const SamplerDescriptor *>(descriptor); |
| 1050 | skip = ValidateSamplerDescriptor(caller, vuids, cb_node, descriptor_set, binding_info, index, |
| 1051 | sampler_desc->GetSampler(), sampler_desc->IsImmutableSampler(), |
| 1052 | sampler_desc->GetSamplerState()); |
| 1053 | } break; |
| 1054 | case DescriptorClass::TexelBuffer: { |
| 1055 | const auto *texel_desc = static_cast<const TexelDescriptor *>(descriptor); |
| 1056 | skip = ValidateTexelDescriptor(caller, vuids, cb_node, descriptor_set, *texel_desc, binding_info, index); |
| 1057 | } break; |
| 1058 | case DescriptorClass::AccelerationStructure: { |
| 1059 | const auto *accel_desc = static_cast<const AccelerationStructureDescriptor *>(descriptor); |
| 1060 | skip = ValidateAccelerationDescriptor(caller, vuids, cb_node, descriptor_set, *accel_desc, binding_info, index); |
| 1061 | } break; |
| 1062 | default: |
| 1063 | break; |
| 1064 | } |
| 1065 | } |
| 1066 | } |
| 1067 | return skip; |
| 1068 | } |
locke-lunarg | 3604599 | 2020-08-20 16:54:37 -0600 | [diff] [blame] | 1069 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1070 | bool CoreChecks::ValidateGeneralBufferDescriptor(const char *caller, const DrawDispatchVuid &vuids, const CMD_BUFFER_STATE *cb_node, |
| 1071 | const cvdescriptorset::DescriptorSet *descriptor_set, |
| 1072 | const cvdescriptorset::BufferDescriptor &descriptor, |
| 1073 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, |
| 1074 | uint32_t index) const { |
| 1075 | // Verify that buffers are valid |
| 1076 | auto buffer = descriptor.GetBuffer(); |
| 1077 | auto buffer_node = descriptor.GetBufferState(); |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1078 | if ((!buffer_node && !enabled_features.robustness2_features.nullDescriptor) || (buffer_node && buffer_node->Destroyed())) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1079 | auto set = descriptor_set->GetSet(); |
| 1080 | return LogError(set, vuids.descriptor_valid, |
| 1081 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1082 | "binding #%" PRIu32 " index %" PRIu32 " is using buffer %s that is invalid or has been destroyed.", |
| 1083 | report_data->FormatHandle(set).c_str(), caller, binding_info.first, index, |
| 1084 | report_data->FormatHandle(buffer).c_str()); |
| 1085 | } |
| 1086 | if (buffer) { |
| 1087 | if (buffer_node && !buffer_node->sparse) { |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1088 | for (const auto &item : buffer_node->GetBoundMemory()) {
| 1089 | auto &binding = item.second; |
| 1090 | if (binding.mem_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1091 | auto set = descriptor_set->GetSet(); |
| 1092 | return LogError(set, vuids.descriptor_valid, |
| 1093 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1094 | "binding #%" PRIu32 " index %" PRIu32 " is uses buffer %s that references invalid memory %s.", |
| 1095 | report_data->FormatHandle(set).c_str(), caller, binding_info.first, index, |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1096 | report_data->FormatHandle(buffer).c_str(), |
| 1097 | report_data->FormatHandle(binding.mem_state->mem()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1098 | } |
| 1099 | } |
| 1100 | } |
| 1101 | if (enabled_features.core11.protectedMemory == VK_TRUE) { |
| 1102 | if (ValidateProtectedBuffer(cb_node, buffer_node, caller, vuids.unprotected_command_buffer, |
| 1103 | "Buffer is in a descriptorSet")) { |
| 1104 | return true; |
| 1105 | } |
| 1106 | if (binding_info.second.is_writable && |
| 1107 | ValidateUnprotectedBuffer(cb_node, buffer_node, caller, vuids.protected_command_buffer, |
| 1108 | "Buffer is in a descriptorSet")) { |
| 1109 | return true; |
| 1110 | } |
| 1111 | } |
| 1112 | } |
| 1113 | return false; |
| 1114 | } |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 1115 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1116 | bool CoreChecks::ValidateImageDescriptor(const char *caller, const DrawDispatchVuid &vuids, const CMD_BUFFER_STATE *cb_node, |
| 1117 | const cvdescriptorset::DescriptorSet *descriptor_set, |
| 1118 | const cvdescriptorset::ImageDescriptor &image_descriptor, |
| 1119 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, uint32_t index, |
| 1120 | bool record_time_validate, const std::vector<IMAGE_VIEW_STATE *> *attachments, |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 1121 | const std::vector<SUBPASS_INFO> *subpasses, VkFramebuffer framebuffer, |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1122 | VkDescriptorType descriptor_type, |
Jeremy Gebben | 37ed31e | 2021-04-23 12:12:10 -0600 | [diff] [blame] | 1123 | layer_data::optional<layer_data::unordered_map<VkImageView, VkImageLayout>> &checked_layouts) const { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1124 | std::vector<const SAMPLER_STATE *> sampler_states; |
| 1125 | VkImageView image_view = image_descriptor.GetImageView(); |
| 1126 | const IMAGE_VIEW_STATE *image_view_state = image_descriptor.GetImageViewState(); |
| 1127 | VkImageLayout image_layout = image_descriptor.GetImageLayout(); |
| 1128 | const auto binding = binding_info.first; |
| 1129 | const auto reqs = binding_info.second.reqs; |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 1130 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1131 | if (image_descriptor.GetClass() == cvdescriptorset::DescriptorClass::ImageSampler) { |
| 1132 | sampler_states.emplace_back( |
| 1133 | static_cast<const cvdescriptorset::ImageSamplerDescriptor &>(image_descriptor).GetSamplerState()); |
| 1134 | } else { |
| 1135 | if (binding_info.second.samplers_used_by_image.size() > index) { |
Jeremy Gebben | 856b8c6 | 2021-12-01 15:20:07 -0700 | [diff] [blame] | 1136 | for (const auto &desc_index : binding_info.second.samplers_used_by_image[index]) { |
| 1137 | const auto *desc = descriptor_set->GetDescriptorFromBinding(desc_index.sampler_slot.binding, desc_index.sampler_index); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1138 | // NOTE: This check _shouldn't_ be necessary due to the checks made in IsSpecificDescriptorType in |
| 1139 | // shader_validation.cpp. However, without this check some traces still crash. |
Jeremy Gebben | 856b8c6 | 2021-12-01 15:20:07 -0700 | [diff] [blame] | 1140 | if (desc && (desc->GetClass() == cvdescriptorset::DescriptorClass::PlainSampler)) { |
| 1141 | const auto *sampler_state = static_cast<const cvdescriptorset::SamplerDescriptor *>(desc)->GetSamplerState(); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1142 | if (sampler_state) sampler_states.emplace_back(sampler_state); |
| 1143 | } |
| 1144 | } |
| 1145 | } |
| 1146 | } |
locke-lunarg | 4e1e463 | 2020-10-26 01:52:19 -0600 | [diff] [blame] | 1147 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1148 | if ((!image_view_state && !enabled_features.robustness2_features.nullDescriptor) || |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1149 | (image_view_state && image_view_state->Destroyed())) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1150 | // Image view must have been destroyed since initial update. Could potentially flag the descriptor |
| 1151 | // as "invalid" (updated = false) at DestroyImageView() time and detect this error at bind time |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 1152 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1153 | auto set = descriptor_set->GetSet(); |
| 1154 | return LogError(set, vuids.descriptor_valid, |
| 1155 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1156 | "binding #%" PRIu32 " index %" PRIu32 " is using imageView %s that is invalid or has been destroyed.", |
| 1157 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1158 | report_data->FormatHandle(image_view).c_str()); |
| 1159 | } |
| 1160 | if (image_view) { |
| 1161 | const auto &image_view_ci = image_view_state->create_info; |
| 1162 | const auto *image_state = image_view_state->image_state.get(); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 1163 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1164 | if (reqs & DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS) { |
| 1165 | if (~reqs & (1 << image_view_ci.viewType)) { |
| 1166 | auto set = descriptor_set->GetSet(); |
| 1167 | return LogError(set, vuids.descriptor_valid, |
| 1168 | "Descriptor set %s encountered the following validation error at %s time: Descriptor " |
| 1169 | "in binding #%" PRIu32 " index %" PRIu32 " requires an image view of type %s but got %s.", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 1170 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1171 | StringDescriptorReqViewType(reqs).c_str(), string_VkImageViewType(image_view_ci.viewType)); |
| 1172 | } |
locke-lunarg | 25b6c35 | 2020-08-06 17:44:18 -0600 | [diff] [blame] | 1173 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1174 | if (!(reqs & image_view_state->descriptor_format_bits)) { |
| 1175 | // bad component type |
| 1176 | auto set = descriptor_set->GetSet(); |
| 1177 | return LogError(set, vuids.descriptor_valid, |
| 1178 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1179 | "Descriptor in binding " |
| 1180 | "#%" PRIu32 " index %" PRIu32 " requires %s component type, but bound descriptor format is %s.", |
| 1181 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1182 | StringDescriptorReqComponentType(reqs), string_VkFormat(image_view_ci.format)); |
| 1183 | } |
| 1184 | } |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 1185 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1186 | // NOTE: Submit time validation of UPDATE_AFTER_BIND image layout is not possible with the |
| 1187 | // image layout tracking as currently implemented, so only record_time_validation is done |
| 1188 | if (!disabled[image_layout_validation] && record_time_validate) { |
| 1189 | // Verify Image Layout |
| 1190 | // No "invalid layout" VUID required for this call, since the optimal_layout parameter is UNDEFINED. |
| 1191 | // The caller provides a checked_layouts map when there are a large number of layouts to check, |
| 1192 | // making it worthwhile to keep track of verified layouts and not recheck them. |
| 1193 | bool already_validated = false; |
| 1194 | if (checked_layouts) { |
| 1195 | auto search = checked_layouts->find(image_view); |
| 1196 | if (search != checked_layouts->end() && search->second == image_layout) { |
| 1197 | already_validated = true; |
| 1198 | } |
| 1199 | } |
| 1200 | if (!already_validated) { |
| 1201 | bool hit_error = false; |
| 1202 | VerifyImageLayout(cb_node, image_state, image_view_state->normalized_subresource_range, |
| 1203 | image_view_ci.subresourceRange.aspectMask, image_layout, VK_IMAGE_LAYOUT_UNDEFINED, caller, |
| 1204 | kVUIDUndefined, "VUID-VkDescriptorImageInfo-imageLayout-00344", &hit_error); |
| 1205 | if (hit_error) { |
| 1206 | auto set = descriptor_set->GetSet(); |
| 1207 | return LogError(set, vuids.descriptor_valid, |
| 1208 | "Descriptor set %s encountered the following validation error at %s time: Image layout " |
| 1209 | "specified " |
| 1210 | "at vkUpdateDescriptorSet* or vkCmdPushDescriptorSet* time " |
| 1211 | "doesn't match actual image layout at time descriptor is used. See previous error callback for " |
| 1212 | "specific details.", |
| 1213 | report_data->FormatHandle(set).c_str(), caller); |
| 1214 | } |
| 1215 | if (checked_layouts) { |
| 1216 | checked_layouts->emplace(image_view, image_layout); |
| 1217 | } |
| 1218 | } |
| 1219 | } |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 1220 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1221 | // Verify Sample counts |
| 1222 | if ((reqs & DESCRIPTOR_REQ_SINGLE_SAMPLE) && image_view_state->samples != VK_SAMPLE_COUNT_1_BIT) { |
| 1223 | auto set = descriptor_set->GetSet(); |
| 1224 | return LogError(set, vuids.descriptor_valid, |
| 1225 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1226 | "binding #%" PRIu32 " index %" PRIu32 " requires bound image to have VK_SAMPLE_COUNT_1_BIT but got %s.", |
| 1227 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1228 | string_VkSampleCountFlagBits(image_view_state->samples)); |
| 1229 | } |
| 1230 | if ((reqs & DESCRIPTOR_REQ_MULTI_SAMPLE) && image_view_state->samples == VK_SAMPLE_COUNT_1_BIT) { |
| 1231 | auto set = descriptor_set->GetSet(); |
| 1232 | return LogError(set, vuids.descriptor_valid, |
| 1233 | "Descriptor set %s encountered the following validation error at %s time: Descriptor " |
| 1234 | "in binding #%" PRIu32 " index %" PRIu32 |
| 1235 | " requires bound image to have multiple samples, but got VK_SAMPLE_COUNT_1_BIT.", |
| 1236 | report_data->FormatHandle(set).c_str(), caller, binding, index); |
| 1237 | } |
locke-lunarg | 3604599 | 2020-08-20 16:54:37 -0600 | [diff] [blame] | 1238 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1239 | // Verify VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT |
| 1240 | if ((reqs & DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION) && (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) && |
| 1241 | !(image_view_state->format_features & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT)) { |
| 1242 | auto set = descriptor_set->GetSet(); |
| 1243 | LogObjectList objlist(set); |
| 1244 | objlist.add(image_view); |
| 1245 | return LogError(objlist, vuids.imageview_atomic, |
| 1246 | "Descriptor set %s encountered the following validation error at %s time: Descriptor " |
| 1247 | "in binding #%" PRIu32 " index %" PRIu32 |
| 1248 | ", %s, format %s, doesn't " |
| 1249 | "contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT.", |
| 1250 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1251 | report_data->FormatHandle(image_view).c_str(), string_VkFormat(image_view_ci.format)); |
| 1252 | } |
locke-lunarg | 654a905 | 2020-10-13 16:28:42 -0600 | [diff] [blame] | 1253 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1254 | // Verify if attachments are used in DescriptorSet |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 1255 | if (attachments && attachments->size() > 0 && subpasses && (descriptor_type != VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)) { |
Jeremy Gebben | b4d1701 | 2021-07-08 13:18:15 -0600 | [diff] [blame] | 1256 | const bool ds_aspect = (image_view_state->normalized_subresource_range.aspectMask &
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1257 | (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != 0;
| 1260 | for (uint32_t att_index = 0; att_index < attachments->size(); ++att_index) {
| 1261 | const auto &view_state = (*attachments)[att_index];
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 1262 | const SUBPASS_INFO &subpass = (*subpasses)[att_index];
| 1263 | if (!subpass.used || !view_state || view_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1264 | continue; |
| 1265 | } |
Ricardo Garcia | fe2b768 | 2021-07-30 10:44:57 +0200 | [diff] [blame] | 1266 | if (ds_aspect && (subpass.usage == VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT || |
| 1267 | subpass.usage == VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1268 | if ((image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL || |
| 1269 | image_layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL || |
| 1270 | image_layout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL) && |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 1271 | (subpass.layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL || |
| 1272 | subpass.layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL || |
| 1273 | subpass.layout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL)) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1274 | continue; |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 1275 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1276 | if ((image_layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL && |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 1277 | subpass.layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL) || |
| 1278 | (subpass.layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL && |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1279 | image_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL)) { |
| 1280 | continue; |
| 1281 | } |
| 1282 | } |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1283 | if (view_state->image_view() == image_view) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1284 | auto set = descriptor_set->GetSet(); |
| 1285 | LogObjectList objlist(set); |
| 1286 | objlist.add(image_view); |
| 1287 | objlist.add(framebuffer); |
| 1288 | return LogError(objlist, vuids.image_subresources, |
| 1289 | "Descriptor set %s encountered the following validation error at %s time: %s is used in " |
| 1290 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 " and %s attachment # %" PRIu32 ".", |
| 1291 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(image_view).c_str(), |
| 1292 | binding, index, report_data->FormatHandle(framebuffer).c_str(), att_index); |
| 1293 | } else { |
| 1294 | if (image_view_state->OverlapSubresource(*view_state)) { |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 1295 | auto set = descriptor_set->GetSet(); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1296 | LogObjectList objlist(set); |
| 1297 | objlist.add(image_view); |
| 1298 | objlist.add(framebuffer); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1299 | objlist.add(view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1300 | return LogError( |
| 1301 | objlist, vuids.image_subresources, |
| 1302 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1303 | "Image subresources of %s in " |
| 1304 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 " and %s in %s attachment # %" PRIu32 " overlap.", |
| 1305 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(image_view).c_str(), binding, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1306 | index, report_data->FormatHandle(view_state->image_view()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1307 | report_data->FormatHandle(framebuffer).c_str(), att_index); |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 1308 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1309 | } |
| 1311 | } |
| 1312 | if (enabled_features.core11.protectedMemory == VK_TRUE) { |
| 1313 | if (ValidateProtectedImage(cb_node, image_view_state->image_state.get(), caller, vuids.unprotected_command_buffer, |
| 1314 | "Image is in a descriptorSet")) { |
| 1315 | return true; |
| 1316 | } |
| 1317 | if (binding_info.second.is_writable && |
| 1318 | ValidateUnprotectedImage(cb_node, image_view_state->image_state.get(), caller, vuids.protected_command_buffer, |
| 1319 | "Image is in a descriptorSet")) { |
| 1320 | return true; |
| 1321 | } |
| 1322 | } |
| 1323 | } |
Chris Forbes | e92dd1d | 2019-01-21 15:58:57 -0800 | [diff] [blame] | 1324 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1325 | for (const auto *sampler_state : sampler_states) { |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1326 | if (!sampler_state || sampler_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1327 | continue; |
| 1328 | } |
locke-lunarg | 25b6c35 | 2020-08-06 17:44:18 -0600 | [diff] [blame] | 1329 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1330 | // TODO: Validate 04015 for DescriptorClass::PlainSampler |
| 1331 | if ((sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT || |
| 1332 | sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) && |
| 1333 | (sampler_state->customCreateInfo.format == VK_FORMAT_UNDEFINED)) { |
| 1334 | if (image_view_state->create_info.format == VK_FORMAT_B4G4R4A4_UNORM_PACK16 || |
| 1335 | image_view_state->create_info.format == VK_FORMAT_B5G6R5_UNORM_PACK16 || |
| 1336 | image_view_state->create_info.format == VK_FORMAT_B5G5R5A1_UNORM_PACK16) { |
| 1337 | auto set = descriptor_set->GetSet(); |
| 1338 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1339 | objlist.add(sampler_state->sampler()); |
| 1340 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1341 | return LogError(objlist, "VUID-VkSamplerCustomBorderColorCreateInfoEXT-format-04015", |
| 1342 | "Descriptor set %s encountered the following validation error at %s time: Sampler %s in " |
| 1343 | "binding #%" PRIu32 " index %" PRIu32 |
| 1344 | " has a custom border color with format = VK_FORMAT_UNDEFINED and is used to " |
| 1345 | "sample an image view %s with format %s", |
| 1346 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1347 | report_data->FormatHandle(sampler_state->sampler()).c_str(), binding, index, |
| 1348 | report_data->FormatHandle(image_view_state->image_view()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1349 | string_VkFormat(image_view_state->create_info.format)); |
| 1350 | } |
| 1351 | } |
| 1352 | VkFilter sampler_mag_filter = sampler_state->createInfo.magFilter; |
| 1353 | VkFilter sampler_min_filter = sampler_state->createInfo.minFilter; |
| 1354 | VkBool32 sampler_compare_enable = sampler_state->createInfo.compareEnable; |
| 1355 | if ((sampler_mag_filter == VK_FILTER_LINEAR || sampler_min_filter == VK_FILTER_LINEAR) && |
| 1356 | (sampler_compare_enable == VK_FALSE) && |
| 1357 | !(image_view_state->format_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT)) { |
| 1358 | auto set = descriptor_set->GetSet(); |
| 1359 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1360 | objlist.add(sampler_state->sampler()); |
| 1361 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1362 | return LogError(objlist, vuids.linear_sampler, |
| 1363 | "Descriptor set %s encountered the following validation error at %s time: Sampler " |
| 1364 | "(%s) is set to use VK_FILTER_LINEAR with " |
Tony-LunarG | 81195df | 2021-12-02 15:01:58 -0700 | [diff] [blame] | 1365 | "compareEnable set to VK_FALSE, but image view's (%s) format (%s) does not "
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1366 | "contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT in its format features.", |
| 1367 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1368 | report_data->FormatHandle(sampler_state->sampler()).c_str(), |
| 1369 | report_data->FormatHandle(image_view_state->image_view()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1370 | string_VkFormat(image_view_state->create_info.format)); |
| 1371 | } |
| 1372 | if (sampler_mag_filter == VK_FILTER_CUBIC_EXT || sampler_min_filter == VK_FILTER_CUBIC_EXT) { |
| 1373 | if (!(image_view_state->format_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT)) { |
| 1374 | auto set = descriptor_set->GetSet(); |
| 1375 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1376 | objlist.add(sampler_state->sampler()); |
| 1377 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1378 | return LogError(objlist, vuids.cubic_sampler, |
| 1379 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1380 | "Sampler (%s) is set to use VK_FILTER_CUBIC_EXT, then " |
| 1381 | "image view's (%s) format (%s) MUST contain " |
| 1382 | "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT in its format features.", |
| 1383 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1384 | report_data->FormatHandle(sampler_state->sampler()).c_str(), |
| 1385 | report_data->FormatHandle(image_view_state->image_view()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1386 | string_VkFormat(image_view_state->create_info.format)); |
| 1387 | } |
| 1388 | |
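|  | // With VK_EXT_filter_cubic, cubic filtering support is reported per image view (filterCubic / |
|  | // filterCubicMinmax, as in VkFilterCubicImageViewImageFormatPropertiesEXT); the checks below read |
|  | // those flags from image_view_state->filter_cubic_props. |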
| 1389 | if (IsExtEnabled(device_extensions.vk_ext_filter_cubic)) { |
| 1390 | const auto reduction_mode_info = |
| 1391 | LvlFindInChain<VkSamplerReductionModeCreateInfo>(sampler_state->createInfo.pNext); |
| 1392 | if (reduction_mode_info && |
| 1393 | (reduction_mode_info->reductionMode == VK_SAMPLER_REDUCTION_MODE_MIN || |
| 1394 | reduction_mode_info->reductionMode == VK_SAMPLER_REDUCTION_MODE_MAX) && |
| 1395 | !image_view_state->filter_cubic_props.filterCubicMinmax) { |
| 1396 | auto set = descriptor_set->GetSet(); |
| 1397 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1398 | objlist.add(sampler_state->sampler()); |
| 1399 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1400 | return LogError(objlist, vuids.filter_cubic_min_max, |
| 1401 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1402 |                                     "Sampler (%s) is set to use VK_FILTER_CUBIC_EXT with reduction mode %s, " |
| 1403 | "but image view (%s) doesn't support filterCubicMinmax.", |
| 1404 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1405 | report_data->FormatHandle(sampler_state->sampler()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1406 | string_VkSamplerReductionMode(reduction_mode_info->reductionMode), |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1407 | report_data->FormatHandle(image_view_state->image_view()).c_str()); |
Chris Forbes | e92dd1d | 2019-01-21 15:58:57 -0800 | [diff] [blame] | 1408 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1409 | |
| 1410 | if (!image_view_state->filter_cubic_props.filterCubic) { |
| 1411 | auto set = descriptor_set->GetSet(); |
| 1412 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1413 | objlist.add(sampler_state->sampler()); |
| 1414 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1415 | return LogError(objlist, vuids.filter_cubic, |
| 1416 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1417 | "Sampler (%s) is set to use VK_FILTER_CUBIC_EXT, " |
| 1418 | "but image view (%s) doesn't support filterCubic.", |
| 1419 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1420 | report_data->FormatHandle(sampler_state->sampler()).c_str(), |
| 1421 | report_data->FormatHandle(image_view_state->image_view()).c_str()); |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 1422 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1423 | } |
locke-lunarg | 4e1e463 | 2020-10-26 01:52:19 -0600 | [diff] [blame] | 1424 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1425 | if (IsExtEnabled(device_extensions.vk_img_filter_cubic)) { |
Clemens Kern | 5a42ea6 | 2021-09-29 16:30:23 +0200 | [diff] [blame] | 1426 | if (image_view_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_3D || |
| 1427 | image_view_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_CUBE || |
| 1428 | image_view_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) { |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 1429 | auto set = descriptor_set->GetSet(); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1430 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1431 | objlist.add(sampler_state->sampler()); |
| 1432 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1433 | return LogError(objlist, vuids.img_filter_cubic, |
| 1434 | "Descriptor set %s encountered the following validation error at %s time: Sampler " |
| 1435 |                                 "(%s) is set to use VK_FILTER_CUBIC_EXT while the VK_IMG_filter_cubic extension " |
| 1436 | "is enabled, but image view (%s) has an invalid imageViewType (%s).", |
| 1437 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1438 | report_data->FormatHandle(sampler_state->sampler()).c_str(), |
| 1439 | report_data->FormatHandle(image_view_state->image_view()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1440 | string_VkImageViewType(image_view_state->create_info.viewType)); |
Tobin Ehlis | b1a2e4b | 2018-03-16 07:54:24 -0600 | [diff] [blame] | 1441 | } |
| 1442 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1443 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1444 | |
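|  | // Corner-sampled images (VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV) may only be sampled with |
|  | // VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE in every dimension; report whichever address mode violates this. |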
| 1445 | if ((image_state->createInfo.flags & VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV) && |
| 1446 | (sampler_state->createInfo.addressModeU != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE || |
| 1447 | sampler_state->createInfo.addressModeV != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE || |
| 1448 | sampler_state->createInfo.addressModeW != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE)) { |
| 1449 | std::string address_mode_letter = |
| 1450 | (sampler_state->createInfo.addressModeU != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE) |
| 1451 | ? "U" |
| 1452 | : (sampler_state->createInfo.addressModeV != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE) ? "V" : "W"; |
| 1453 | VkSamplerAddressMode address_mode = |
| 1454 | (sampler_state->createInfo.addressModeU != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE) |
| 1455 | ? sampler_state->createInfo.addressModeU |
| 1456 | : (sampler_state->createInfo.addressModeV != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE) |
| 1457 | ? sampler_state->createInfo.addressModeV |
| 1458 | : sampler_state->createInfo.addressModeW; |
| 1459 | auto set = descriptor_set->GetSet(); |
| 1460 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1461 | objlist.add(sampler_state->sampler()); |
| 1462 | objlist.add(image_state->image()); |
| 1463 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1464 | return LogError(objlist, vuids.corner_sampled_address_mode, |
| 1465 | "Descriptor set %s encountered the following validation error at %s time: Image " |
| 1466 | "(%s) in image view (%s) is created with flag " |
| 1467 | "VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV and can only be sampled using " |
| 1468 |                         "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, but sampler (%s) has " |
| 1469 | "createInfo.addressMode%s set to %s.", |
| 1470 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1471 | report_data->FormatHandle(image_state->image()).c_str(), |
| 1472 | report_data->FormatHandle(image_view_state->image_view()).c_str(), |
| 1473 | report_data->FormatHandle(sampler_state->sampler()).c_str(), address_mode_letter.c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1474 | string_VkSamplerAddressMode(address_mode)); |
| 1475 | } |
| 1476 | |
| 1477 | // UnnormalizedCoordinates sampler validations |
| 1478 | if (sampler_state->createInfo.unnormalizedCoordinates) { |
| 1479 |             // If the ImageView is used by an unnormalizedCoordinates sampler, its view type must be checked |
| 1480 | if (image_view_ci.viewType == VK_IMAGE_VIEW_TYPE_3D || image_view_ci.viewType == VK_IMAGE_VIEW_TYPE_CUBE || |
| 1481 | image_view_ci.viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY || |
| 1482 | image_view_ci.viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY || |
| 1483 | image_view_ci.viewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) { |
| 1484 | auto set = descriptor_set->GetSet(); |
| 1485 | LogObjectList objlist(set); |
| 1486 | objlist.add(image_view); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1487 | objlist.add(sampler_state->sampler()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1488 | return LogError(objlist, vuids.sampler_imageview_type, |
| 1489 |                             "Descriptor set %s encountered the following validation error at %s time: %s, type %s, in " |
| 1490 |                             "Descriptor in binding #%" PRIu32 " index %" PRIu32 " is used by %s, which has unnormalizedCoordinates enabled.", |
| 1491 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(image_view).c_str(), |
| 1492 | string_VkImageViewType(image_view_ci.viewType), binding, index, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1493 | report_data->FormatHandle(sampler_state->sampler()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1494 | } |
| 1495 | |
| 1496 | // sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* |
| 1497 | // instructions with ImplicitLod, Dref or Proj in their name |
| 1498 | if (reqs & DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ) { |
| 1499 | auto set = descriptor_set->GetSet(); |
| 1500 | LogObjectList objlist(set); |
| 1501 | objlist.add(image_view); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1502 | objlist.add(sampler_state->sampler()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1503 | return LogError(objlist, vuids.sampler_implicitLod_dref_proj, |
| 1504 | "Descriptor set %s encountered the following validation error at %s time: %s in " |
| 1505 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 |
| 1506 |                             " is used by %s with a SPIR-V ImplicitLod, Dref or Proj sampling instruction, which is disallowed with unnormalizedCoordinates.", |
| 1507 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(image_view).c_str(), |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1508 | binding, index, report_data->FormatHandle(sampler_state->sampler()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1509 | } |
| 1510 | |
| 1511 | // sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* |
| 1512 | // instructions that includes a LOD bias or any offset values |
| 1513 | if (reqs & DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET) { |
| 1514 | auto set = descriptor_set->GetSet(); |
| 1515 | LogObjectList objlist(set); |
| 1516 | objlist.add(image_view); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1517 | objlist.add(sampler_state->sampler()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1518 | return LogError(objlist, vuids.sampler_bias_offset, |
| 1519 | "Descriptor set %s encountered the following validation error at %s time: %s in " |
| 1520 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 |
| 1521 |                             " is used by %s with a SPIR-V sampling instruction that includes an LOD bias or offset, which is disallowed with unnormalizedCoordinates.", |
| 1522 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(image_view).c_str(), |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1523 | binding, index, report_data->FormatHandle(sampler_state->sampler()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1524 | } |
| 1525 | } |
| 1526 | } |
| 1527 | } |
| 1528 | return false; |
| 1529 | } |
| 1530 | |
| 1531 | bool CoreChecks::ValidateTexelDescriptor(const char *caller, const DrawDispatchVuid &vuids, const CMD_BUFFER_STATE *cb_node, |
| 1532 | const cvdescriptorset::DescriptorSet *descriptor_set, |
| 1533 | const cvdescriptorset::TexelDescriptor &texel_descriptor, |
| 1534 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, |
| 1535 | uint32_t index) const { |
| 1536 | auto buffer_view = texel_descriptor.GetBufferView(); |
| 1537 | auto buffer_view_state = texel_descriptor.GetBufferViewState(); |
| 1538 | const auto binding = binding_info.first; |
| 1539 | const auto reqs = binding_info.second.reqs; |
| 1540 | if ((!buffer_view_state && !enabled_features.robustness2_features.nullDescriptor) || |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1541 | (buffer_view_state && buffer_view_state->Destroyed())) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1542 | auto set = descriptor_set->GetSet(); |
| 1543 | return LogError(set, vuids.descriptor_valid, |
| 1544 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1545 | "binding #%" PRIu32 " index %" PRIu32 " is using bufferView %s that is invalid or has been destroyed.", |
| 1546 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1547 | report_data->FormatHandle(buffer_view).c_str()); |
| 1548 | } |
| 1549 | if (buffer_view) { |
| 1550 | auto buffer = buffer_view_state->create_info.buffer; |
| 1551 | auto buffer_state = buffer_view_state->buffer_state.get(); |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1552 | if (buffer_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1553 | auto set = descriptor_set->GetSet(); |
| 1554 | return LogError(set, vuids.descriptor_valid, |
| 1555 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1556 | "binding #%" PRIu32 " index %" PRIu32 " is using buffer %s that has been destroyed.", |
| 1557 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1558 | report_data->FormatHandle(buffer).c_str()); |
| 1559 | } |
| 1560 | auto format_bits = DescriptorRequirementsBitsFromFormat(buffer_view_state->create_info.format); |
| 1561 | |
| 1562 | if (!(reqs & format_bits)) { |
| 1563 | // bad component type |
| 1564 | auto set = descriptor_set->GetSet(); |
| 1565 | return LogError(set, vuids.descriptor_valid, |
| 1566 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1567 | "binding #%" PRIu32 " index %" PRIu32 " requires %s component type, but bound descriptor format is %s.", |
| 1568 | report_data->FormatHandle(set).c_str(), caller, binding, index, StringDescriptorReqComponentType(reqs), |
| 1569 | string_VkFormat(buffer_view_state->create_info.format)); |
| 1570 | } |
| 1571 | |
| 1572 | // Verify VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT |
| 1573 | if ((reqs & DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION) && |
| 1574 | (descriptor_set->GetTypeFromBinding(binding) == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER) && |
| 1575 | !(buffer_view_state->format_features & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT)) { |
| 1576 | auto set = descriptor_set->GetSet(); |
| 1577 | LogObjectList objlist(set); |
| 1578 | objlist.add(buffer_view); |
| 1579 | return LogError(objlist, "UNASSIGNED-None-MismatchAtomicBufferFeature", |
| 1580 | "Descriptor set %s encountered the following validation error at %s time: Descriptor " |
| 1581 | "in binding #%" PRIu32 " index %" PRIu32 |
| 1582 | ", %s, format %s, doesn't " |
| 1583 |                             "contain VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT.", |
| 1584 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1585 | report_data->FormatHandle(buffer_view).c_str(), string_VkFormat(buffer_view_state->create_info.format)); |
| 1586 | } |
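|  | // Protected memory rules: an unprotected command buffer must not access a protected buffer, and a |
|  | // protected command buffer must not write to an unprotected buffer (only checked for writable bindings). |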
| 1587 | if (enabled_features.core11.protectedMemory == VK_TRUE) { |
| 1588 | if (ValidateProtectedBuffer(cb_node, buffer_view_state->buffer_state.get(), caller, vuids.unprotected_command_buffer, |
| 1589 | "Buffer is in a descriptorSet")) { |
| 1590 | return true; |
| 1591 | } |
| 1592 | if (binding_info.second.is_writable && |
| 1593 | ValidateUnprotectedBuffer(cb_node, buffer_view_state->buffer_state.get(), caller, vuids.protected_command_buffer, |
| 1594 | "Buffer is in a descriptorSet")) { |
| 1595 | return true; |
| 1596 | } |
| 1597 | } |
| 1598 | } |
| 1599 | return false; |
| 1600 | } |
| 1601 | |
| 1602 | bool CoreChecks::ValidateAccelerationDescriptor(const char *caller, const DrawDispatchVuid &vuids, const CMD_BUFFER_STATE *cb_node, |
| 1603 | const cvdescriptorset::DescriptorSet *descriptor_set, |
| 1604 | const cvdescriptorset::AccelerationStructureDescriptor &descriptor, |
| 1605 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, |
| 1606 | uint32_t index) const { |
| 1607 | // Verify that acceleration structures are valid |
| 1608 | const auto binding = binding_info.first; |
| 1609 | if (descriptor.is_khr()) { |
| 1610 | auto acc = descriptor.GetAccelerationStructure(); |
| 1611 | auto acc_node = descriptor.GetAccelerationStructureStateKHR(); |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1612 | if (!acc_node || acc_node->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1613 | if (acc != VK_NULL_HANDLE || !enabled_features.robustness2_features.nullDescriptor) { |
| 1614 | auto set = descriptor_set->GetSet(); |
| 1615 | return LogError(set, vuids.descriptor_valid, |
| 1616 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1617 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 |
| 1618 | " is using acceleration structure %s that is invalid or has been destroyed.", |
| 1619 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1620 | report_data->FormatHandle(acc).c_str()); |
| 1621 | } |
| 1622 | } else { |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1623 | for (const auto &item: acc_node->GetBoundMemory()) { |
| 1624 | auto &mem_binding = item.second; |
| 1625 | if (mem_binding.mem_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1626 | auto set = descriptor_set->GetSet(); |
| 1627 | return LogError(set, vuids.descriptor_valid, |
| 1628 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1629 | "binding #%" PRIu32 " index %" PRIu32 |
| 1630 | " is using acceleration structure %s that references invalid memory %s.", |
| 1631 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1632 | report_data->FormatHandle(acc).c_str(), |
| 1633 | report_data->FormatHandle(mem_binding.mem_state->mem()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1634 | } |
| 1635 | } |
| 1636 | } |
| 1637 | } else { |
| 1638 | auto acc = descriptor.GetAccelerationStructureNV(); |
| 1639 | auto acc_node = descriptor.GetAccelerationStructureStateNV(); |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1640 | if (!acc_node || acc_node->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1641 | if (acc != VK_NULL_HANDLE || !enabled_features.robustness2_features.nullDescriptor) { |
| 1642 | auto set = descriptor_set->GetSet(); |
| 1643 | return LogError(set, vuids.descriptor_valid, |
| 1644 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1645 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 |
| 1646 | " is using acceleration structure %s that is invalid or has been destroyed.", |
| 1647 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1648 | report_data->FormatHandle(acc).c_str()); |
| 1649 | } |
| 1650 | } else { |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1651 | for (const auto &item : acc_node->GetBoundMemory()) { |
| 1652 | auto &mem_binding = item.second; |
| 1653 | if (mem_binding.mem_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1654 | auto set = descriptor_set->GetSet(); |
| 1655 | return LogError(set, vuids.descriptor_valid, |
| 1656 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1657 | "binding #%" PRIu32 " index %" PRIu32 |
| 1658 | " is using acceleration structure %s that references invalid memory %s.", |
| 1659 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1660 | report_data->FormatHandle(acc).c_str(), |
| 1661 | report_data->FormatHandle(mem_binding.mem_state->mem()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1662 | } |
| 1663 | } |
| 1664 | } |
| 1665 | } |
| 1666 | return false; |
| 1667 | } |
| 1668 | |
| 1669 | // If a validation check involves both the image and the sampler, leave it in the path guarded by |
| 1670 | // (descriptor_class == DescriptorClass::ImageSampler || descriptor_class == DescriptorClass::Image). |
| 1671 | // This function validates only the sampler. |
| 1672 | bool CoreChecks::ValidateSamplerDescriptor(const char *caller, const DrawDispatchVuid &vuids, const CMD_BUFFER_STATE *cb_node, |
| 1673 | const cvdescriptorset::DescriptorSet *descriptor_set, |
| 1674 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, uint32_t index, |
| 1675 | VkSampler sampler, bool is_immutable, const SAMPLER_STATE *sampler_state) const { |
| 1676 | // Verify Sampler still valid |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1677 | if (!sampler_state || sampler_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1678 | auto set = descriptor_set->GetSet(); |
| 1679 | return LogError(set, vuids.descriptor_valid, |
| 1680 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1681 | "binding #%" PRIu32 " index %" PRIu32 " is using sampler %s that is invalid or has been destroyed.", |
| 1682 | report_data->FormatHandle(set).c_str(), caller, binding_info.first, index, |
| 1683 | report_data->FormatHandle(sampler).c_str()); |
| 1684 | } else { |
| 1685 | if (sampler_state->samplerConversion && !is_immutable) { |
| 1686 | auto set = descriptor_set->GetSet(); |
| 1687 | return LogError(set, vuids.descriptor_valid, |
| 1688 | "Descriptor set %s encountered the following validation error at %s time: sampler (%s) " |
| 1689 |                         "in the descriptor set (%s) contains a YCBCR conversion (%s), so the sampler must " |
| 1690 |                         "also be bound as an immutable sampler.", |
| 1691 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(sampler).c_str(), |
| 1692 | report_data->FormatHandle(descriptor_set->GetSet()).c_str(), |
| 1693 | report_data->FormatHandle(sampler_state->samplerConversion).c_str()); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1694 | } |
| 1695 | } |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 1696 | return false; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1697 | } |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 1698 | |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1699 | // Loop through the write updates to do for a push descriptor set, ignoring dstSet |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 1700 | void cvdescriptorset::DescriptorSet::PerformPushDescriptorsUpdate(ValidationStateTracker *dev_data, uint32_t write_count, |
| 1701 | const VkWriteDescriptorSet *p_wds) { |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1702 | assert(IsPushDescriptor()); |
| 1703 | for (uint32_t i = 0; i < write_count; i++) { |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 1704 | PerformWriteUpdate(dev_data, &p_wds[i]); |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1705 | } |
Jason Macnak | 83cfd58 | 2019-07-31 10:14:24 -0700 | [diff] [blame] | 1706 | |
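|  | // Keep deep copies of the writes (safe_VkWriteDescriptorSet) so the pushed descriptor contents remain |
|  | // available after the caller's pointers go out of scope. |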
| 1707 | push_descriptor_set_writes.clear(); |
| 1708 | push_descriptor_set_writes.reserve(static_cast<std::size_t>(write_count)); |
| 1709 | for (uint32_t i = 0; i < write_count; i++) { |
| 1710 | push_descriptor_set_writes.push_back(safe_VkWriteDescriptorSet(&p_wds[i])); |
| 1711 | } |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1712 | } |
| 1713 | |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1714 | // Perform write update in given update struct |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 1715 | void cvdescriptorset::DescriptorSet::PerformWriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *update) { |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 1716 | // Perform update on a per-binding basis as consecutive updates roll over to next binding |
| 1717 | auto descriptors_remaining = update->descriptorCount; |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 1718 | auto offset = update->dstArrayElement; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 1719 | auto orig_binding = DescriptorSetLayout::ConstBindingIterator(layout_.get(), update->dstBinding); |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 1720 | auto current_binding = orig_binding; |
| 1721 | |
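|  |     // Illustrative example (not from the spec text): with mutually consistent bindings |
|  |     // {0: 3 descriptors, 1: 2, 2: 5}, a write with dstBinding=0, dstArrayElement=2, descriptorCount=4 |
|  |     // updates binding 0 index 2, binding 1 indices 0-1, and binding 2 index 0. The loop below walks |
|  |     // bindings until the count is exhausted or a binding is no longer consistent with the original one. |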
Tobin Ehlis | e16805c | 2017-08-09 09:10:37 -0600 | [diff] [blame] | 1722 | uint32_t update_index = 0; |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 1723 |     // Walk consecutive bindings while they remain consistent with the original binding (type, stage flags & immutable sampler use) and the iterator is not at the end |
| 1724 | while (descriptors_remaining && orig_binding.IsConsistent(current_binding)) { |
| 1725 | const auto &index_range = current_binding.GetGlobalIndexRange(); |
| 1726 | auto global_idx = index_range.start + offset; |
| 1727 |         // global_idx is the global index of the descriptor to update. If global_idx >= index_range.end, the descriptor is not in |
| 1728 |         // this binding, so advance and look in the next binding. |
| 1729 | if (global_idx >= index_range.end) { |
| 1730 | offset -= current_binding.GetDescriptorCount(); |
| 1731 | ++current_binding; |
| 1732 | continue; |
| 1733 | } |
| 1734 | |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 1735 | // Loop over the updates for a single binding at a time |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 1736 | uint32_t update_count = std::min(descriptors_remaining, current_binding.GetDescriptorCount() - offset); |
Tobin Ehlis | e16805c | 2017-08-09 09:10:37 -0600 | [diff] [blame] | 1737 | for (uint32_t di = 0; di < update_count; ++di, ++update_index) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 1738 | descriptors_[global_idx + di]->WriteUpdate(this, state_data_, update, update_index); |
ziga-lunarg | e5d2854 | 2021-10-24 21:14:25 +0200 | [diff] [blame] | 1739 | descriptors_[global_idx + di]->SetDescriptorType(update->descriptorType); |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 1740 | } |
| 1741 | // Roll over to next binding in case of consecutive update |
| 1742 | descriptors_remaining -= update_count; |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 1743 | if (descriptors_remaining) { |
| 1744 |             // The remaining descriptors roll over into the next binding. Reset the offset and advance to the next binding, |
| 1745 |             // looking for the start point. All bindings (even those skipped) must be consistent with the update and with the |
| 1746 |             // original binding. |
| 1747 | offset = 0; |
| 1748 | ++current_binding; |
| 1749 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1750 | } |
Jeff Bolz | dd4cfa1 | 2019-08-11 20:57:51 -0500 | [diff] [blame] | 1751 | if (update->descriptorCount) { |
| 1752 | some_update_ = true; |
| 1753 | change_count_++; |
| 1754 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1755 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 1756 | if (!IsPushDescriptor() && !(layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) & |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1757 | (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) { |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1758 | Invalidate(false); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1759 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1760 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1761 | // Validate Copy update |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1762 | bool CoreChecks::ValidateCopyUpdate(const VkCopyDescriptorSet *update, const DescriptorSet *dst_set, const DescriptorSet *src_set, |
Jeff Bolz | 46c0ea0 | 2019-10-09 13:06:29 -0500 | [diff] [blame] | 1763 | const char *func_name, std::string *error_code, std::string *error_msg) const { |
Jeff Bolz | 6aad174 | 2019-10-16 11:10:09 -0500 | [diff] [blame] | 1764 | auto dst_layout = dst_set->GetLayout().get(); |
| 1765 | auto src_layout = src_set->GetLayout().get(); |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1766 | |
John Zulauf | 5dfd45c | 2018-01-17 11:06:34 -0700 | [diff] [blame] | 1767 | // Verify dst layout still valid |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1768 | if (dst_layout->Destroyed()) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1769 | *error_code = "VUID-VkCopyDescriptorSet-dstSet-parameter"; |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 1770 | std::ostringstream str; |
| 1771 | str << "Cannot call " << func_name << " to perform copy update on dstSet " << report_data->FormatHandle(dst_set->GetSet()) |
| 1772 | << " created with destroyed " << report_data->FormatHandle(dst_layout->GetDescriptorSetLayout()) << "."; |
| 1773 | *error_msg = str.str(); |
John Zulauf | 5dfd45c | 2018-01-17 11:06:34 -0700 | [diff] [blame] | 1774 | return false; |
| 1775 | } |
| 1776 | |
| 1777 | // Verify src layout still valid |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1778 | if (src_layout->Destroyed()) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1779 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-parameter"; |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 1780 | std::ostringstream str; |
| 1781 | str << "Cannot call " << func_name << " to perform copy update on dstSet " << report_data->FormatHandle(dst_set->GetSet()) |
| 1782 | << " from srcSet " << report_data->FormatHandle(src_set->GetSet()) << " created with destroyed " |
| 1783 | << report_data->FormatHandle(src_layout->GetDescriptorSetLayout()) << "."; |
| 1784 | *error_msg = str.str(); |
John Zulauf | 5dfd45c | 2018-01-17 11:06:34 -0700 | [diff] [blame] | 1785 | return false; |
| 1786 | } |
| 1787 | |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1788 | if (!dst_layout->HasBinding(update->dstBinding)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1789 | *error_code = "VUID-VkCopyDescriptorSet-dstBinding-00347"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1790 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1791 | error_str << "DescriptorSet " << report_data->FormatHandle(dst_set->GetSet()) |
| 1792 | << " does not have copy update dest binding of " << update->dstBinding; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1793 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1794 | return false; |
| 1795 | } |
| 1796 | if (!src_set->HasBinding(update->srcBinding)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1797 | *error_code = "VUID-VkCopyDescriptorSet-srcBinding-00345"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1798 | std::stringstream error_str; |
sourav parmar | f4a7825 | 2020-04-10 13:04:21 -0700 | [diff] [blame] | 1799 | error_str << "DescriptorSet " << report_data->FormatHandle(src_set->GetSet()) |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1800 | << " does not have copy update src binding of " << update->srcBinding; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1801 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1802 | return false; |
| 1803 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1804 | // Verify idle ds |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1805 | if (dst_set->InUse() && |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1806 | !(dst_layout->GetDescriptorBindingFlagsFromBinding(update->dstBinding) & |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1807 | (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) { |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1808 | // TODO : Re-using Free Idle error code, need copy update idle error code |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1809 | *error_code = "VUID-vkFreeDescriptorSets-pDescriptorSets-00309"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1810 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1811 | error_str << "Cannot call " << func_name << " to perform copy update on descriptor set " |
| 1812 | << report_data->FormatHandle(dst_set->GetSet()) << " that is in use by a command buffer"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1813 | *error_msg = error_str.str(); |
| 1814 | return false; |
| 1815 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1816 | // src & dst set bindings are valid |
| 1817 | // Check bounds of src & dst |
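|  |     // e.g. (illustrative) if srcBinding's descriptors start at global index 4 and srcArrayElement is 2, the |
|  |     // copy reads global indices 6 .. 6 + descriptorCount - 1, which must stay within the set's total count |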
John Zulauf | c483f44 | 2017-12-15 14:02:06 -0700 | [diff] [blame] | 1818 | auto src_start_idx = src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start + update->srcArrayElement; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1819 | if ((src_start_idx + update->descriptorCount) > src_set->GetTotalDescriptorCount()) { |
| 1820 | // SRC update out of bounds |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1821 | *error_code = "VUID-VkCopyDescriptorSet-srcArrayElement-00346"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1822 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1823 | error_str << "Attempting copy update from descriptorSet " << report_data->FormatHandle(update->srcSet) << " binding#" |
| 1824 | << update->srcBinding << " with offset index of " |
| 1825 | << src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start << " plus update array offset of " |
| 1826 | << update->srcArrayElement << " and update of " << update->descriptorCount |
Tobin Ehlis | 1d81edd | 2016-11-21 09:50:49 -0700 | [diff] [blame] | 1827 | << " descriptors oversteps total number of descriptors in set: " << src_set->GetTotalDescriptorCount(); |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1828 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1829 | return false; |
| 1830 | } |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1831 | auto dst_start_idx = dst_layout->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement; |
| 1832 | if ((dst_start_idx + update->descriptorCount) > dst_layout->GetTotalDescriptorCount()) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1833 | // DST update out of bounds |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1834 | *error_code = "VUID-VkCopyDescriptorSet-dstArrayElement-00348"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1835 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1836 | error_str << "Attempting copy update to descriptorSet " << report_data->FormatHandle(dst_set->GetSet()) << " binding#" |
| 1837 | << update->dstBinding << " with offset index of " |
| 1838 | << dst_layout->GetGlobalIndexRangeFromBinding(update->dstBinding).start << " plus update array offset of " |
| 1839 | << update->dstArrayElement << " and update of " << update->descriptorCount |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1840 | << " descriptors oversteps total number of descriptors in set: " << dst_layout->GetTotalDescriptorCount(); |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1841 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1842 | return false; |
| 1843 | } |
| 1844 | // Check that types match |
Shannon McPherson | afe5512 | 2020-05-25 16:20:19 -0600 | [diff] [blame] | 1845 |     // TODO : Base default error case going from here is "VUID-VkAcquireNextImageInfoKHR-semaphore-parameter", which covers all |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 1846 | // consistency issues, need more fine-grained error codes |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1847 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-00349"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1848 | auto src_type = src_set->GetTypeFromBinding(update->srcBinding); |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1849 | auto dst_type = dst_layout->GetTypeFromBinding(update->dstBinding); |
Ricardo Garcia | 14f4f76 | 2021-04-13 11:36:12 +0200 | [diff] [blame] | 1850 | if (src_type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE && dst_type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE && src_type != dst_type) { |
sourav parmar | f4a7825 | 2020-04-10 13:04:21 -0700 | [diff] [blame] | 1851 | *error_code = "VUID-VkCopyDescriptorSet-dstBinding-02632"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1852 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1853 | error_str << "Attempting copy update to descriptorSet " << report_data->FormatHandle(dst_set->GetSet()) << " binding #" |
| 1854 | << update->dstBinding << " with type " << string_VkDescriptorType(dst_type) << " from descriptorSet " |
| 1855 | << report_data->FormatHandle(src_set->GetSet()) << " binding #" << update->srcBinding << " with type " |
| 1856 | << string_VkDescriptorType(src_type) << ". Types do not match"; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1857 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1858 | return false; |
| 1859 | } |
| 1860 | // Verify consistency of src & dst bindings if update crosses binding boundaries |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1861 | if ((!VerifyUpdateConsistency(report_data, DescriptorSetLayout::ConstBindingIterator(src_layout, update->srcBinding), |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 1862 | update->srcArrayElement, update->descriptorCount, "copy update from", src_set->GetSet(), |
| 1863 | error_msg)) || |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1864 | (!VerifyUpdateConsistency(report_data, DescriptorSetLayout::ConstBindingIterator(dst_layout, update->dstBinding), |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1865 | update->dstArrayElement, update->descriptorCount, "copy update to", dst_set->GetSet(), |
| 1866 | error_msg))) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1867 | return false; |
| 1868 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1869 | |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1870 | if ((src_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT) && |
| 1871 | !(dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1872 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-01918"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1873 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1874 | error_str << "If pname:srcSet's (" << report_data->FormatHandle(update->srcSet) |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1875 | << ") layout was created with the " |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1876 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag " |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1877 | "set, then pname:dstSet's (" |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1878 | << report_data->FormatHandle(update->dstSet) |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1879 | << ") layout must: also have been created with the " |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1880 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag set"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1881 | *error_msg = error_str.str(); |
| 1882 | return false; |
| 1883 | } |
| 1884 | |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 1885 | if (IsExtEnabled(device_extensions.vk_valve_mutable_descriptor_type)) { |
Mike Schuchardt | 294a159 | 2021-05-12 15:38:00 -0700 | [diff] [blame] | 1886 | if (!(src_layout->GetCreateFlags() & (VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT | |
| 1887 | VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE)) && |
| 1888 | (dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT)) { |
| 1889 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-04885"; |
| 1890 | std::stringstream error_str; |
| 1891 | error_str << "If pname:srcSet's (" << report_data->FormatHandle(update->srcSet) |
| 1892 | << ") layout was created with neither ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT nor " |
| 1893 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE flags set, then pname:dstSet's (" |
| 1894 | << report_data->FormatHandle(update->dstSet) |
| 1895 | << ") layout must: have been created without the " |
| 1896 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag set"; |
| 1897 | *error_msg = error_str.str(); |
| 1898 | return false; |
| 1899 | } |
| 1900 | } else { |
| 1901 | if (!(src_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT) && |
| 1902 | (dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT)) { |
| 1903 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-04886"; |
| 1904 | std::stringstream error_str; |
| 1905 | error_str << "If pname:srcSet's (" << report_data->FormatHandle(update->srcSet) |
| 1906 | << ") layout was created without the ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag " |
| 1907 | "set, then pname:dstSet's (" |
| 1908 | << report_data->FormatHandle(update->dstSet) |
| 1909 | << ") layout must: also have been created without the " |
| 1910 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag set"; |
| 1911 | *error_msg = error_str.str(); |
| 1912 | return false; |
| 1913 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1914 | } |
| 1915 | |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1916 | if ((src_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT) && |
| 1917 | !(dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1918 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-01920"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1919 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1920 | error_str << "If the descriptor pool from which pname:srcSet (" << report_data->FormatHandle(update->srcSet) |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1921 | << ") was allocated was created " |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1922 | "with the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag " |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1923 | "set, then the descriptor pool from which pname:dstSet (" |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1924 | << report_data->FormatHandle(update->dstSet) |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1925 | << ") was allocated must: " |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1926 | "also have been created with the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1927 | *error_msg = error_str.str(); |
| 1928 | return false; |
| 1929 | } |
| 1930 | |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 1931 | if (IsExtEnabled(device_extensions.vk_valve_mutable_descriptor_type)) { |
Mike Schuchardt | 294a159 | 2021-05-12 15:38:00 -0700 | [diff] [blame] | 1932 | if (!(src_set->GetPoolState()->createInfo.flags & |
| 1933 | (VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT | VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE)) && |
| 1934 | (dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT)) { |
| 1935 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-04887"; |
| 1936 | std::stringstream error_str; |
| 1937 | error_str << "If the descriptor pool from which pname:srcSet (" << report_data->FormatHandle(update->srcSet) |
| 1938 | << ") was allocated was created with neither ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT nor " |
| 1939 | "ename:VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE flags set, then the descriptor pool from which " |
| 1940 | "pname:dstSet (" |
| 1941 | << report_data->FormatHandle(update->dstSet) |
| 1942 | << ") was allocated must: have been created without the " |
| 1943 | "ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set"; |
| 1944 | *error_msg = error_str.str(); |
| 1945 | return false; |
| 1946 | } |
| 1947 | } else { |
| 1948 | if (!(src_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT) && |
| 1949 | (dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT)) { |
| 1950 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-04888"; |
| 1951 | std::stringstream error_str; |
| 1952 | error_str << "If the descriptor pool from which pname:srcSet (" << report_data->FormatHandle(update->srcSet) |
| 1953 | << ") was allocated was created without the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set, " |
| 1954 | "then the descriptor pool from which pname:dstSet (" |
| 1955 | << report_data->FormatHandle(update->dstSet) |
| 1956 | << ") was allocated must: also have been created without the " |
| 1957 | "ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set"; |
| 1958 | *error_msg = error_str.str(); |
| 1959 | return false; |
| 1960 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1961 | } |
| 1962 | |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 1963 | if (src_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) { |
| 1964 | if ((update->srcArrayElement % 4) != 0) { |
| 1965 | *error_code = "VUID-VkCopyDescriptorSet-srcBinding-02223"; |
| 1966 | std::stringstream error_str; |
| 1967 | error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with " |
| 1968 | << "srcArrayElement " << update->srcArrayElement << " not a multiple of 4"; |
| 1969 | *error_msg = error_str.str(); |
| 1970 | return false; |
| 1971 | } |
| 1972 | if ((update->dstArrayElement % 4) != 0) { |
| 1973 | *error_code = "VUID-VkCopyDescriptorSet-dstBinding-02224"; |
| 1974 | std::stringstream error_str; |
| 1975 | error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with " |
| 1976 | << "dstArrayElement " << update->dstArrayElement << " not a multiple of 4"; |
| 1977 | *error_msg = error_str.str(); |
| 1978 | return false; |
| 1979 | } |
| 1980 | if ((update->descriptorCount % 4) != 0) { |
| 1981 | *error_code = "VUID-VkCopyDescriptorSet-srcBinding-02225"; |
| 1982 | std::stringstream error_str; |
| 1983 | error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with " |
| 1984 | << "descriptorCount " << update->descriptorCount << " not a multiple of 4"; |
| 1985 | *error_msg = error_str.str(); |
| 1986 | return false; |
| 1987 | } |
| 1988 | } |
| 1989 | |
ziga-lunarg | e5d2854 | 2021-10-24 21:14:25 +0200 | [diff] [blame] | 1990 | if (dst_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 1991 | if (src_type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 1992 | if (!dst_layout->IsTypeMutable(src_type, update->dstBinding)) { |
| 1993 | *error_code = "VUID-VkCopyDescriptorSet-dstSet-04612"; |
| 1994 | std::stringstream error_str; |
| 1995 | error_str << "Attempting copy update with dstBinding descriptor type VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, but the new " |
| 1996 | "active descriptor type " |
| 1997 | << string_VkDescriptorType(src_type) |
| 1998 | << " is not in the corresponding pMutableDescriptorTypeLists list."; |
| 1999 | *error_msg = error_str.str(); |
| 2000 | return false; |
| 2001 | } |
| 2002 | } |
| 2003 | } else if (src_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2004 | const auto *descriptor = src_set->GetDescriptorFromGlobalIndex(update->srcBinding); |
| 2005 | if (descriptor->active_descriptor_type != dst_type) { |
| 2006 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-04613"; |
| 2007 | std::stringstream error_str; |
| 2008 | error_str << "Attempting copy update with srcBinding descriptor type VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, but the " |
| 2009 | "active descriptor type (" |
| 2010 | << string_VkDescriptorType(descriptor->active_descriptor_type) |
| 2011 | << ") does not match the dstBinding descriptor type " << string_VkDescriptorType(dst_type) << "."; |
| 2012 | *error_msg = error_str.str(); |
| 2013 | return false; |
| 2014 | } |
| 2015 | } |
| 2016 | |
| 2017 | if (dst_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2018 | if (src_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2019 | const auto &mutable_src_types = src_layout->GetMutableTypes(update->srcBinding); |
| 2020 | const auto &mutable_dst_types = dst_layout->GetMutableTypes(update->dstBinding); |
| 2021 | bool complete_match = mutable_src_types.size() == mutable_dst_types.size(); |
| 2022 | if (complete_match) { |
| 2023 | for (const auto mutable_src_type : mutable_src_types) { |
| 2024 | if (std::find(mutable_dst_types.begin(), mutable_dst_types.end(), mutable_src_type) == |
| 2025 | mutable_dst_types.end()) { |
| 2026 | complete_match = false; |
| 2027 | break; |
| 2028 | } |
| 2029 | } |
| 2030 | } |
| 2031 | if (!complete_match) { |
| 2032 | *error_code = "VUID-VkCopyDescriptorSet-dstSet-04614"; |
| 2033 | std::stringstream error_str; |
| 2034 | error_str << "Attempting copy update with dstBinding and new active descriptor type being " |
| 2035 | "VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, but their corresponding pMutableDescriptorTypeLists do not match."; |
| 2036 | *error_msg = error_str.str(); |
| 2037 | return false; |
| 2038 | } |
| 2039 | } |
| 2040 | } |
| 2041 | |
ziga-lunarg | 5b2bede | 2021-11-08 11:54:17 +0100 | [diff] [blame] | 2042 | // Update mutable types |
| 2043 | if (src_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2044 | src_type = src_set->GetDescriptorFromGlobalIndex(update->srcBinding)->active_descriptor_type; |
| 2045 | } |
| 2046 | if (dst_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2047 | dst_type = dst_set->GetDescriptorFromGlobalIndex(update->dstBinding)->active_descriptor_type; |
| 2048 | } |
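|  |     // From this point src_type/dst_type hold the currently active (resolved) descriptor types, so the |
|  |     // content verification below compares concrete types rather than VK_DESCRIPTOR_TYPE_MUTABLE_VALVE. |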
| 2049 | |
Tobin Ehlis | d41e7b6 | 2016-05-19 07:56:18 -0600 | [diff] [blame] | 2050 | // Update parameters all look good and descriptor updated so verify update contents |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 2051 | if (!VerifyCopyUpdateContents(update, src_set, src_type, src_start_idx, dst_set, dst_type, dst_start_idx, func_name, error_code, |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2052 | error_msg)) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 2053 | return false; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2054 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2055 | |
| 2056 | // All checks passed so update is good |
| 2057 | return true; |
| 2058 | } |
| 2059 | // Perform Copy update |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 2060 | void cvdescriptorset::DescriptorSet::PerformCopyUpdate(ValidationStateTracker *dev_data, const VkCopyDescriptorSet *update, |
| 2061 | const DescriptorSet *src_set) { |
John Zulauf | c483f44 | 2017-12-15 14:02:06 -0700 | [diff] [blame] | 2062 | auto src_start_idx = src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start + update->srcArrayElement; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2063 | auto dst_start_idx = layout_->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2064 | // Update parameters all look good so perform update |
| 2065 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 2066 | auto src = src_set->descriptors_[src_start_idx + di].get(); |
| 2067 | auto dst = descriptors_[dst_start_idx + di].get(); |
| 2068 | if (src->updated) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2069 | dst->CopyUpdate(this, state_data_, src); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 2070 | some_update_ = true; |
Jeff Bolz | dd4cfa1 | 2019-08-11 20:57:51 -0500 | [diff] [blame] | 2071 | change_count_++; |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 2072 | } else { |
| 2073 | dst->updated = false; |
| 2074 | } |
ziga-lunarg | e5d2854 | 2021-10-24 21:14:25 +0200 | [diff] [blame] | 2075 | dst->active_descriptor_type = src->active_descriptor_type; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2076 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2077 | |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2078 | if (!(layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) & |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 2079 | (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) { |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 2080 | Invalidate(false); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2081 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2082 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2083 | |
John Zulauf | 6f3d2bd | 2018-10-29 17:08:42 -0600 | [diff] [blame] | 2084 | // Update the drawing state for the affected descriptors. |
| 2085 | // Set cb_node to this set and this set to cb_node. |
| 2086 | // Add the bindings of the descriptor set |
| 2087 | // Set the layout based on the current descriptor layout (will mask subsequent layout mismatch errors) |
| 2088 | // TODO: Modify the UpdateDrawState virtual functions to *only* set initial layout and not change layouts |
Tobin Ehlis | f951910 | 2016-08-17 09:49:13 -0600 | [diff] [blame] | 2089 | // Prereq: This should be called for a set that has been confirmed to be active for the given cb_node, meaning it's going |
| 2090 | // to be used in a draw by the given cb_node |
Jeremy Kniager | e682743 | 2020-04-01 09:05:56 -0600 | [diff] [blame] | 2091 | void cvdescriptorset::DescriptorSet::UpdateDrawState(ValidationStateTracker *device_data, CMD_BUFFER_STATE *cb_node, |
| 2092 | CMD_TYPE cmd_type, const PIPELINE_STATE *pipe, |
sfricke-samsung | 85584a7 | 2021-09-30 21:43:38 -0700 | [diff] [blame] | 2093 | const BindingReqMap &binding_req_map) { |
Tony-LunarG | 7782280 | 2020-05-28 16:35:46 -0600 | [diff] [blame] | 2094 | if (!device_data->disabled[command_buffer_state] && !IsPushDescriptor()) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2095 | cb_node->AddChild(this); |
Jeff Bolz | afa429a | 2019-08-14 09:59:22 -0500 | [diff] [blame] | 2096 | } |
Jeff Bolz | e18e724 | 2019-08-12 20:55:22 -0500 | [diff] [blame] | 2097 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2098 | // Descriptor UpdateDrawState only calls image layout validation callbacks. If that validation is disabled, skip the entire loop.
| 2099 | if (device_data->disabled[image_layout_validation]) { |
Jeff Bolz | e18e724 | 2019-08-12 20:55:22 -0500 | [diff] [blame] | 2100 | return; |
| 2101 | } |
| 2102 | |
Tobin Ehlis | f951910 | 2016-08-17 09:49:13 -0600 | [diff] [blame] | 2103 | // For the active slots, use set# to look up descriptorSet from boundDescriptorSets, and bind all of that descriptor set's |
| 2104 | // resources |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 2105 | CMD_BUFFER_STATE::CmdDrawDispatchInfo cmd_info = {}; |
John Zulauf | 79f0658 | 2021-02-27 18:38:39 -0700 | [diff] [blame] | 2106 | for (const auto &binding_req_pair : binding_req_map) { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2107 | auto index = layout_->GetIndexFromBinding(binding_req_pair.first); |
locke-g | b3ce08f | 2019-09-30 12:30:56 -0600 | [diff] [blame] | 2108 | |
Tony-LunarG | 62c5dba | 2018-12-20 14:27:23 -0700 | [diff] [blame] | 2109 | // We aren't validating descriptors created with PARTIALLY_BOUND or UPDATE_AFTER_BIND, so don't record state |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2110 | auto flags = layout_->GetDescriptorBindingFlagsFromIndex(index); |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 2111 | if (flags & (VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT)) { |
| 2112 | if (!(flags & VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT)) { |
locke-lunarg | 3604599 | 2020-08-20 16:54:37 -0600 | [diff] [blame] | 2113 | cmd_info.binding_infos.emplace_back(binding_req_pair); |
locke-g | b3ce08f | 2019-09-30 12:30:56 -0600 | [diff] [blame] | 2114 | } |
Tony-LunarG | 62c5dba | 2018-12-20 14:27:23 -0700 | [diff] [blame] | 2115 | continue; |
| 2116 | } |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2117 | auto range = layout_->GetGlobalIndexRangeFromIndex(index); |
John Zulauf | c483f44 | 2017-12-15 14:02:06 -0700 | [diff] [blame] | 2118 | for (uint32_t i = range.start; i < range.end; ++i) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2119 | const auto descriptor_class = descriptors_[i]->GetClass(); |
| 2120 | switch (descriptor_class) { |
| 2121 | case DescriptorClass::Image: |
| 2122 | case DescriptorClass::ImageSampler: { |
| 2123 | auto *image_desc = static_cast<ImageDescriptor *>(descriptors_[i].get()); |
| 2124 | image_desc->UpdateDrawState(device_data, cb_node); |
| 2125 | break; |
| 2126 | } |
| 2127 | default: |
| 2128 | break; |
| 2129 | } |
Mark Lobodzinski | 2872f4a | 2018-09-03 17:00:53 -0600 | [diff] [blame] | 2130 | } |
| 2131 | } |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 2132 | |
| 2133 | if (cmd_info.binding_infos.size() > 0) { |
| 2134 | cmd_info.cmd_type = cmd_type; |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 2135 | if (cb_node->activeFramebuffer) { |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 2136 | cmd_info.framebuffer = cb_node->activeFramebuffer->framebuffer(); |
locke-lunarg | fc78e93 | 2020-11-19 17:06:24 -0700 | [diff] [blame] | 2137 | cmd_info.attachments = cb_node->active_attachments; |
| 2138 | cmd_info.subpasses = cb_node->active_subpasses; |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 2139 | } |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 2140 | cb_node->validate_descriptorsets_in_queuesubmit[GetSet()].emplace_back(cmd_info); |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 2141 | } |
Mark Lobodzinski | 2872f4a | 2018-09-03 17:00:53 -0600 | [diff] [blame] | 2142 | } |
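| | // Illustrative sketch (application side, hypothetical names): the PARTIALLY_BOUND / UPDATE_AFTER_BIND skip above applies
| | // to bindings whose layout was created with flags chained through VkDescriptorSetLayoutBindingFlagsCreateInfo, e.g.:
| | //     VkDescriptorBindingFlags binding_flags = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT |
| | //                                              VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT;
| | //     VkDescriptorSetLayoutBindingFlagsCreateInfo flags_info = {};
| | //     flags_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO;
| | //     flags_info.bindingCount = 1;
| | //     flags_info.pBindingFlags = &binding_flags;
| | //     // ... chained into VkDescriptorSetLayoutCreateInfo::pNext when the layout is created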
| 2143 | |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2144 | void cvdescriptorset::DescriptorSet::FilterOneBindingReq(const BindingReqMap::value_type &binding_req_pair, BindingReqMap *out_req, |
| 2145 | const TrackedBindings &bindings, uint32_t limit) { |
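| | // 'limit' is the number of bindings of this type declared in the layout (from GetBindingTypeStats). If the tracked
| | // set already holds that many bindings, every binding of the type has been validated and nothing is re-emitted;
| | // otherwise the binding is emitted unless it is already present in the tracked (validated) set.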
| 2146 | if (bindings.size() < limit) { |
| 2147 | const auto it = bindings.find(binding_req_pair.first); |
| 2148 | if (it == bindings.cend()) out_req->emplace(binding_req_pair); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2149 | } |
| 2150 | } |
Mark Lobodzinski | 2872f4a | 2018-09-03 17:00:53 -0600 | [diff] [blame] | 2151 | |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2152 | void cvdescriptorset::DescriptorSet::FilterBindingReqs(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline, |
| 2153 | const BindingReqMap &in_req, BindingReqMap *out_req) const { |
| 2154 | // Because this method is const, we have to use find() on the maps rather than operator[]...
Jeremy Gebben | 87db52f | 2021-10-14 13:55:09 -0600 | [diff] [blame] | 2155 | const auto validated_it = cb_state.descriptorset_cache.find(this); |
| 2156 | if (validated_it == cb_state.descriptorset_cache.end()) { |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2157 | // Nothing has been validated yet, so copy everything from in_req to out_req
| 2158 | for (const auto &binding_req_pair : in_req) { |
| 2159 | out_req->emplace(binding_req_pair); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2160 | } |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2161 | return; |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2162 | } |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2163 | const auto &validated = validated_it->second; |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2164 | |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2165 | const auto image_sample_version_it = validated.image_samplers.find(&pipeline); |
| 2166 | const VersionedBindings *image_sample_version = nullptr; |
| 2167 | if (image_sample_version_it != validated.image_samplers.cend()) { |
| 2168 | image_sample_version = &(image_sample_version_it->second); |
| 2169 | } |
| 2170 | const auto &dynamic_buffers = validated.dynamic_buffers; |
| 2171 | const auto &non_dynamic_buffers = validated.non_dynamic_buffers; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2172 | const auto &stats = layout_->GetBindingTypeStats(); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2173 | for (const auto &binding_req_pair : in_req) { |
| 2174 | auto binding = binding_req_pair.first; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2175 | VkDescriptorSetLayoutBinding const *layout_binding = layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2176 | if (!layout_binding) { |
| 2177 | continue; |
| 2178 | } |
| 2179 | // Caching criteria differ per descriptor type.
| 2180 | // If image layouts have changed, the image descriptors need to be re-validated against them.
sfricke-samsung | 60f29ec | 2021-03-10 20:37:25 -0800 | [diff] [blame] | 2181 | if (IsBufferDescriptor(layout_binding->descriptorType)) { |
| 2182 | if (IsDynamicDescriptor(layout_binding->descriptorType)) { |
| 2183 | FilterOneBindingReq(binding_req_pair, out_req, dynamic_buffers, stats.dynamic_buffer_count); |
| 2184 | } else { |
| 2185 | FilterOneBindingReq(binding_req_pair, out_req, non_dynamic_buffers, stats.non_dynamic_buffer_count); |
| 2186 | } |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2187 | } else { |
| 2188 | // This is rather crude, as the changed layouts may not affect the bound descriptors,
| 2189 | // but the version counter gives a cheap "dirty" test.
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2190 | bool stale = true; |
| 2191 | if (image_sample_version) { |
| 2192 | const auto version_it = image_sample_version->find(binding); |
| 2193 | if (version_it != image_sample_version->cend() && (version_it->second == cb_state.image_layout_change_count)) { |
| 2194 | stale = false; |
| 2195 | } |
| 2196 | } |
| 2197 | if (stale) { |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2198 | out_req->emplace(binding_req_pair); |
| 2199 | } |
| 2200 | } |
| 2201 | } |
| 2202 | } |
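| | // Worked example (hypothetical values): with in_req = {binding 0 (dynamic uniform buffer), binding 1 (combined image
| | // sampler)} and a cache that already holds binding 0 plus binding 1 validated at image_layout_change_count == 3:
| | //   - if cb_state.image_layout_change_count is still 3, nothing is copied to out_req (everything is cached);
| | //   - if it has advanced to 4, only binding 1 is re-emitted, since the image/sampler entry is treated as stale.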
Tobin Ehlis | 9252c2b | 2016-07-21 14:40:22 -0600 | [diff] [blame] | 2203 | |
Jeremy Gebben | 87db52f | 2021-10-14 13:55:09 -0600 | [diff] [blame] | 2204 | void cvdescriptorset::DescriptorSet::UpdateValidationCache(CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline, |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2205 | const BindingReqMap &updated_bindings) { |
Jeremy Gebben | 87db52f | 2021-10-14 13:55:09 -0600 | [diff] [blame] | 2206 | auto &validated = cb_state.descriptorset_cache[this]; |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2207 | |
| 2208 | auto &image_sample_version = validated.image_samplers[&pipeline]; |
| 2209 | auto &dynamic_buffers = validated.dynamic_buffers; |
| 2210 | auto &non_dynamic_buffers = validated.non_dynamic_buffers; |
| 2211 | for (const auto &binding_req_pair : updated_bindings) { |
| 2212 | auto binding = binding_req_pair.first; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2213 | VkDescriptorSetLayoutBinding const *layout_binding = layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding); |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2214 | if (!layout_binding) { |
| 2215 | continue; |
| 2216 | } |
| 2217 | // Caching criteria differ per descriptor type.
sfricke-samsung | 60f29ec | 2021-03-10 20:37:25 -0800 | [diff] [blame] | 2218 | if (IsBufferDescriptor(layout_binding->descriptorType)) { |
| 2219 | if (IsDynamicDescriptor(layout_binding->descriptorType)) { |
| 2220 | dynamic_buffers.emplace(binding); |
| 2221 | } else { |
| 2222 | non_dynamic_buffers.emplace(binding); |
| 2223 | } |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2224 | } else { |
| 2225 | // Save the layout change version... |
| 2226 | image_sample_version[binding] = cb_state.image_layout_change_count; |
| 2227 | } |
| 2228 | } |
| 2229 | } |
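| | // Summary of what is cached above: buffer bindings are recorded by binding number (split into dynamic and
| | // non-dynamic sets), while image/sampler bindings are recorded per pipeline together with the command buffer's
| | // image_layout_change_count at validation time, so later layout changes mark them stale.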
| 2230 | |
John Zulauf | d2c3dae | 2019-12-12 11:02:17 -0700 | [diff] [blame] | 2231 | cvdescriptorset::SamplerDescriptor::SamplerDescriptor(const ValidationStateTracker *dev_data, const VkSampler *immut) |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2232 | : Descriptor(PlainSampler), immutable_(false) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2233 | if (immut) { |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 2234 | sampler_state_ = dev_data->GetConstCastShared<SAMPLER_STATE>(*immut); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2235 | immutable_ = true; |
| 2236 | updated = true; |
| 2237 | } |
| 2238 | } |
Tobin Ehlis | e2f8029 | 2016-06-02 10:08:53 -0600 | [diff] [blame] | 2239 | // Validate the given sampler. Currently this only checks that a SAMPLER_STATE object exists for it
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 2240 | bool CoreChecks::ValidateSampler(const VkSampler sampler) const { return Get<SAMPLER_STATE>(sampler).get() != nullptr; } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2241 | |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2242 | bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image_layout, VkDescriptorType type, |
John Zulauf | bd9b341 | 2019-08-22 17:16:11 -0600 | [diff] [blame] | 2243 | const char *func_name, std::string *error_code, std::string *error_msg) const { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 2244 | auto iv_state = Get<IMAGE_VIEW_STATE>(image_view); |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2245 | assert(iv_state); |
| 2246 | |
Tobin Ehlis | 8128096 | 2016-07-20 14:04:20 -0600 | [diff] [blame] | 2247 | // Note that when an imageview is created, we validate that memory is bound, so there is no need to re-check that here
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2248 | // Validate that imageLayout is compatible with aspect_mask and image format |
| 2249 | // and validate that image usage bits are correct for given usage |
Jeremy Gebben | b4d1701 | 2021-07-08 13:18:15 -0600 | [diff] [blame] | 2250 | VkImageAspectFlags aspect_mask = iv_state->normalized_subresource_range.aspectMask; |
Tobin Ehlis | 8b26a38 | 2016-09-14 08:02:49 -0600 | [diff] [blame] | 2251 | VkImage image = iv_state->create_info.image; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2252 | VkFormat format = VK_FORMAT_MAX_ENUM; |
| 2253 | VkImageUsageFlags usage = 0; |
Jeremy Gebben | 057f9d5 | 2021-11-05 14:12:31 -0600 | [diff] [blame] | 2254 | auto *image_node = iv_state->image_state.get(); |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2255 | assert(image_node); |
Chris Forbes | 67757ff | 2017-07-21 13:59:01 -0700 | [diff] [blame] | 2256 | |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2257 | format = image_node->createInfo.format; |
| 2258 | usage = image_node->createInfo.usage; |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 2259 | const auto stencil_usage_info = LvlFindInChain<VkImageStencilUsageCreateInfo>(image_node->createInfo.pNext); |
Ricardo Garcia | 3f5984c | 2020-04-09 10:56:34 +0200 | [diff] [blame] | 2260 | if (stencil_usage_info) { |
| 2261 | usage |= stencil_usage_info->stencilUsage; |
| 2262 | } |
Mark Lobodzinski | 03d0006 | 2020-06-15 14:35:45 -0600 | [diff] [blame] | 2263 | |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2264 | // Validate that memory is bound to image |
sfricke-samsung | 52dbd08 | 2021-09-23 11:17:58 -0700 | [diff] [blame] | 2265 | if (ValidateMemoryIsBoundToImage(image_node, func_name, kVUID_Core_Bound_Resource_FreedMemoryAccess)) { |
| 2266 | *error_code = kVUID_Core_Bound_Resource_FreedMemoryAccess; |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2267 | *error_msg = "No memory bound to image."; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2268 | return false; |
| 2269 | } |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2270 | |
| 2271 | // KHR_maintenance1 allows rendering into 2D or 2DArray views which slice a 3D image, |
| 2272 | // but not binding them to descriptor sets. |
Jeremy Gebben | 11a68a3 | 2021-07-29 11:59:22 -0600 | [diff] [blame] | 2273 | if (iv_state->IsDepthSliced()) { |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2274 | *error_code = "VUID-VkDescriptorImageInfo-imageView-00343"; |
| 2275 | *error_msg = "ImageView must not be a 2D or 2DArray view of a 3D image"; |
| 2276 | return false; |
| 2277 | } |
| 2278 | |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2279 | // TODO : The various image aspect and format checks here are based on general spec language in 11.5 Image Views section under |
| 2280 | // vkCreateImageView(). What's the best way to create unique id for these cases? |
sfricke-samsung | 52dbd08 | 2021-09-23 11:17:58 -0700 | [diff] [blame] | 2281 | *error_code = kVUID_Core_DrawState_InvalidImageView; |
Dave Houlton | 1d2022c | 2017-03-29 11:43:58 -0600 | [diff] [blame] | 2282 | bool ds = FormatIsDepthOrStencil(format); |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2283 | switch (image_layout) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2284 | case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: |
| 2285 | // Only Color bit must be set |
| 2286 | if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) { |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2287 | std::stringstream error_str; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 2288 | error_str |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2289 | << "ImageView (" << report_data->FormatHandle(image_view) |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 2290 | << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but does not have VK_IMAGE_ASPECT_COLOR_BIT set."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2291 | *error_msg = error_str.str(); |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2292 | return false; |
| 2293 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2294 | // format must NOT be DS |
| 2295 | if (ds) { |
| 2296 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2297 | error_str << "ImageView (" << report_data->FormatHandle(image_view) |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2298 | << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but the image format is " |
| 2299 | << string_VkFormat(format) << " which is not a color format."; |
| 2300 | *error_msg = error_str.str(); |
| 2301 | return false; |
| 2302 | } |
| 2303 | break; |
| 2304 | case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: |
| 2305 | case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: |
| 2306 | // Depth or stencil bit must be set, but both must NOT be set |
Tobin Ehlis | bbf3f91 | 2016-06-15 13:03:58 -0600 | [diff] [blame] | 2307 | if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) { |
| 2308 | if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) { |
| 2309 | // both must NOT be set |
| 2310 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2311 | error_str << "ImageView (" << report_data->FormatHandle(image_view) |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 2312 | << ") has both STENCIL and DEPTH aspects set"; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2313 | *error_msg = error_str.str(); |
Tobin Ehlis | bbf3f91 | 2016-06-15 13:03:58 -0600 | [diff] [blame] | 2314 | return false; |
| 2315 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2316 | } else if (!(aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT)) { |
| 2317 | // Neither aspect was set
| 2318 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2319 | error_str << "ImageView (" << report_data->FormatHandle(image_view) << ") has layout " |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 2320 | << string_VkImageLayout(image_layout) << " but does not have STENCIL or DEPTH aspects set"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2321 | *error_msg = error_str.str(); |
| 2322 | return false; |
Tobin Ehlis | bbf3f91 | 2016-06-15 13:03:58 -0600 | [diff] [blame] | 2323 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2324 | // format must be DS |
| 2325 | if (!ds) { |
| 2326 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2327 | error_str << "ImageView (" << report_data->FormatHandle(image_view) << ") has layout " |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 2328 | << string_VkImageLayout(image_layout) << " but the image format is " << string_VkFormat(format) |
| 2329 | << " which is not a depth/stencil format."; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2330 | *error_msg = error_str.str(); |
| 2331 | return false; |
| 2332 | } |
| 2333 | break; |
| 2334 | default: |
| 2335 | // For other layouts, if the source is a depth/stencil image, both aspect bits must not be set
| 2336 | if (ds) { |
| 2337 | if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) { |
| 2338 | if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) { |
| 2339 | // both must NOT be set |
| 2340 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2341 | error_str << "ImageView (" << report_data->FormatHandle(image_view) << ") has layout " |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 2342 | << string_VkImageLayout(image_layout) << " and is using depth/stencil image of format " |
| 2343 | << string_VkFormat(format) |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2344 | << " but it has both STENCIL and DEPTH aspects set, which is illegal. When using a depth/stencil " |
| 2345 | "image in a descriptor set, please only set either VK_IMAGE_ASPECT_DEPTH_BIT or " |
| 2346 | "VK_IMAGE_ASPECT_STENCIL_BIT depending on whether it will be used for depth reads or stencil " |
| 2347 | "reads respectively."; |
Mark Lobodzinski | 4d05d7a | 2019-06-25 09:12:06 -0600 | [diff] [blame] | 2348 | *error_code = "VUID-VkDescriptorImageInfo-imageView-01976"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2349 | *error_msg = error_str.str(); |
| 2350 | return false; |
| 2351 | } |
| 2352 | } |
| 2353 | } |
| 2354 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2355 | } |
| 2356 | // Now validate that usage flags are correctly set for given type of update |
Tobin Ehlis | fb4cf71 | 2016-10-10 14:02:48 -0600 | [diff] [blame] | 2357 | // As we're switching per-type, if any type has specific layout requirements, check those here as well |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2358 | // TODO : The various image usage bit requirements are in general spec language for VkImageUsageFlags bit block in 11.3 Images |
| 2359 | // under vkCreateImage() |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 2360 | const char *error_usage_bit = nullptr; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2361 | switch (type) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2362 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
sfricke-samsung | 088f124 | 2020-06-06 02:15:35 -0700 | [diff] [blame] | 2363 | if (iv_state->samplerConversion != VK_NULL_HANDLE) { |
| 2364 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-01946"; |
| 2365 | std::stringstream error_str; |
| 2366 | error_str << "ImageView (" << report_data->FormatHandle(image_view) << ")" |
| 2367 | << "used as a VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE can't be created with VkSamplerYcbcrConversion"; |
| 2368 | *error_msg = error_str.str(); |
| 2369 | return false; |
| 2370 | } |
| 2371 | // fall through
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2372 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: { |
| 2373 | if (!(usage & VK_IMAGE_USAGE_SAMPLED_BIT)) { |
| 2374 | error_usage_bit = "VK_IMAGE_USAGE_SAMPLED_BIT"; |
sfricke-samsung | 7923f21 | 2020-02-29 21:17:35 -0800 | [diff] [blame] | 2375 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00337"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2376 | } |
| 2377 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2378 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2379 | case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: { |
| 2380 | if (!(usage & VK_IMAGE_USAGE_STORAGE_BIT)) { |
| 2381 | error_usage_bit = "VK_IMAGE_USAGE_STORAGE_BIT"; |
sfricke-samsung | 7923f21 | 2020-02-29 21:17:35 -0800 | [diff] [blame] | 2382 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00339"; |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 2383 | } else if ((VK_IMAGE_LAYOUT_GENERAL != image_layout) && |
| 2384 | (!IsExtEnabled(device_extensions.vk_khr_shared_presentable_image) || |
| 2385 | (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR != image_layout))) { |
sfricke-samsung | f105898 | 2020-09-10 22:36:49 -0700 | [diff] [blame] | 2386 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-04152"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2387 | std::stringstream error_str; |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2388 | error_str << "Descriptor update with descriptorType VK_DESCRIPTOR_TYPE_STORAGE_IMAGE" |
| 2389 | << " is being updated with invalid imageLayout " << string_VkImageLayout(image_layout) << " for image " |
| 2390 | << report_data->FormatHandle(image) << " in imageView " << report_data->FormatHandle(image_view) |
| 2391 | << ". Allowed layouts are: VK_IMAGE_LAYOUT_GENERAL"; |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 2392 | if (IsExtEnabled(device_extensions.vk_khr_shared_presentable_image)) { |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2393 | error_str << " or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR"; |
Tobin Ehlis | bb03e5f | 2017-05-11 08:52:51 -0600 | [diff] [blame] | 2394 | } |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2395 | *error_msg = error_str.str(); |
| 2396 | return false; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2397 | } |
| 2398 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2399 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2400 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: { |
| 2401 | if (!(usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) { |
| 2402 | error_usage_bit = "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT"; |
sfricke-samsung | 7923f21 | 2020-02-29 21:17:35 -0800 | [diff] [blame] | 2403 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00338"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2404 | } |
| 2405 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2406 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2407 | default: |
| 2408 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2409 | } |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 2410 | if (error_usage_bit) { |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2411 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2412 | error_str << "ImageView (" << report_data->FormatHandle(image_view) << ") with usage mask " << std::hex << std::showbase |
| 2413 | << usage << " being used for a descriptor update of type " << string_VkDescriptorType(type) << " does not have " |
| 2414 | << error_usage_bit << " set."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2415 | *error_msg = error_str.str(); |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2416 | return false; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2417 | } |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2418 | |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2419 | // All the following types share the same set of valid image layouts;
| 2420 | // the check for Storage Images is above
| 2421 | if ((type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) || (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) || |
| 2422 | (type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)) { |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2423 | // Test that the layout is compatible with the descriptorType for these image descriptor types
| 2424 | const static std::array<VkImageLayout, 3> valid_layouts = { |
Jeremy Hayes | d0549f6 | 2019-06-05 10:15:36 -0600 | [diff] [blame] | 2425 | {VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL}}; |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2426 | |
| 2427 | struct ExtensionLayout { |
| 2428 | VkImageLayout layout; |
Tony-LunarG | 2ec96bb | 2019-11-26 13:43:02 -0700 | [diff] [blame] | 2429 | ExtEnabled DeviceExtensions::*extension; |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2430 | }; |
Ricardo Garcia | 602c202 | 2021-07-30 10:42:17 +0200 | [diff] [blame] | 2431 | const static std::array<ExtensionLayout, 7> extended_layouts{{ |
Jeremy Gebben | 579aaca | 2021-02-15 13:36:18 -0700 | [diff] [blame] | 2432 | // Note double brace req'd for aggregate initialization |
| 2433 | {VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, &DeviceExtensions::vk_khr_shared_presentable_image}, |
| 2434 | {VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, &DeviceExtensions::vk_khr_maintenance2}, |
| 2435 | {VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, &DeviceExtensions::vk_khr_maintenance2}, |
Mike Schuchardt | c57de4a | 2021-07-20 17:26:32 -0700 | [diff] [blame] | 2436 | {VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR, &DeviceExtensions::vk_khr_synchronization2}, |
| 2437 | {VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR, &DeviceExtensions::vk_khr_synchronization2}, |
Ricardo Garcia | 602c202 | 2021-07-30 10:42:17 +0200 | [diff] [blame] | 2438 | {VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, &DeviceExtensions::vk_khr_separate_depth_stencil_layouts}, |
| 2439 | {VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, &DeviceExtensions::vk_khr_separate_depth_stencil_layouts}, |
Jeremy Gebben | 579aaca | 2021-02-15 13:36:18 -0700 | [diff] [blame] | 2440 | }}; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2441 | auto is_layout = [image_layout, this](const ExtensionLayout &ext_layout) { |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 2442 | return IsExtEnabled(device_extensions.*(ext_layout.extension)) && (ext_layout.layout == image_layout); |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2443 | }; |
| 2444 | |
| 2445 | bool valid_layout = (std::find(valid_layouts.cbegin(), valid_layouts.cend(), image_layout) != valid_layouts.cend()) || |
| 2446 | std::any_of(extended_layouts.cbegin(), extended_layouts.cend(), is_layout); |
| 2447 | |
| 2448 | if (!valid_layout) { |
sfricke-samsung | f105898 | 2020-09-10 22:36:49 -0700 | [diff] [blame] | 2449 | // The following works as currently all 3 descriptor types share the same set of valid layouts |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2450 | switch (type) { |
| 2451 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
Shannon McPherson | 2c793ba | 2020-08-28 12:13:24 -0600 | [diff] [blame] | 2452 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-04149"; |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2453 | break; |
| 2454 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: |
Shannon McPherson | 2c793ba | 2020-08-28 12:13:24 -0600 | [diff] [blame] | 2455 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-04150"; |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2456 | break; |
| 2457 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: |
sfricke-samsung | f105898 | 2020-09-10 22:36:49 -0700 | [diff] [blame] | 2458 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-04151"; |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2459 | break; |
| 2460 | default: |
| 2461 | break; |
| 2462 | } |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2463 | std::stringstream error_str; |
| 2464 | error_str << "Descriptor update with descriptorType " << string_VkDescriptorType(type) |
Mark Lobodzinski | 74eddba | 2019-06-21 14:16:33 -0600 | [diff] [blame] | 2465 | << " is being updated with invalid imageLayout " << string_VkImageLayout(image_layout) << " for image " |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2466 | << report_data->FormatHandle(image) << " in imageView " << report_data->FormatHandle(image_view) |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2467 | << ". Allowed layouts are: VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, " |
| 2468 | << "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL"; |
| 2469 | for (auto &ext_layout : extended_layouts) { |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 2470 | if (IsExtEnabled(device_extensions.*(ext_layout.extension))) { |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2471 | error_str << ", " << string_VkImageLayout(ext_layout.layout); |
| 2472 | } |
| 2473 | } |
| 2474 | *error_msg = error_str.str(); |
| 2475 | return false; |
| 2476 | } |
| 2477 | } |
| 2478 | |
sfricke-samsung | bd0e805 | 2020-06-06 01:36:39 -0700 | [diff] [blame] | 2479 | if ((type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) || (type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)) { |
| 2480 | const VkComponentMapping components = iv_state->create_info.components; |
| 2481 | if (IsIdentitySwizzle(components) == false) { |
| 2482 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00336"; |
| 2483 | std::stringstream error_str; |
| 2484 | error_str << "ImageView (" << report_data->FormatHandle(image_view) << ") has a non-identity swizzle component, "
| 2485 | << " r swizzle = " << string_VkComponentSwizzle(components.r) << "," |
| 2486 | << " g swizzle = " << string_VkComponentSwizzle(components.g) << "," |
| 2487 | << " b swizzle = " << string_VkComponentSwizzle(components.b) << "," |
| 2488 | << " a swizzle = " << string_VkComponentSwizzle(components.a) << "."; |
| 2489 | *error_msg = error_str.str(); |
| 2490 | return false; |
| 2491 | } |
| 2492 | } |
| 2493 | |
Tony-LunarG | 69604c4 | 2021-11-22 16:00:12 -0700 | [diff] [blame] | 2494 | if ((type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) && (iv_state->min_lod != 0.0f)) { |
| 2495 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-06450"; |
| 2496 | std::stringstream error_str; |
| 2497 | error_str << "ImageView (" << report_data->FormatHandle(image_view) |
| 2498 | << ") , written to a descriptor of type VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT with a minLod (" << iv_state->min_lod |
| 2499 | << ") that is not 0.0"; |
| 2500 | *error_msg = error_str.str(); |
| 2501 | return false; |
| 2502 | } |
| 2503 | |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2504 | return true; |
| 2505 | } |
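| | // Illustrative sketch (application side, hypothetical handles): a write that passes the checks above for a
| | // VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER binding:
| | //     VkDescriptorImageInfo image_info = {};
| | //     image_info.sampler = sampler;       // created with vkCreateSampler
| | //     image_info.imageView = view;        // view of an image created with VK_IMAGE_USAGE_SAMPLED_BIT
| | //     image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
| | //     VkWriteDescriptorSet write = {};
| | //     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
| | //     write.dstSet = set;
| | //     write.dstBinding = 0;
| | //     write.dstArrayElement = 0;
| | //     write.descriptorCount = 1;
| | //     write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
| | //     write.pImageInfo = &image_info;
| | //     vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);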
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2506 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2507 | // Helper template to change shared pointer members of a Descriptor, while |
| 2508 | // correctly managing links to the parent DescriptorSet. |
| 2509 | // src and dst are shared pointers. |
| 2510 | template <typename T> |
| 2511 | static void ReplaceStatePtr(DescriptorSet *set_state, T &dst, const T &src) { |
| 2512 | if (dst) { |
| 2513 | dst->RemoveParent(set_state); |
| 2514 | } |
| 2515 | dst = src; |
| 2516 | if (dst) { |
| 2517 | dst->AddParent(set_state); |
| 2518 | } |
| 2519 | } |
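| | // Design note: keeping the parent links in sync here lets state changes (e.g. destruction) of a sampler, image view,
| | // buffer, or buffer view propagate through the BASE_NODE parent tracking to every descriptor set that still references it.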
| 2520 | |
| 2521 | void cvdescriptorset::SamplerDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2522 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Chris Forbes | fea2c54 | 2018-04-13 09:34:15 -0700 | [diff] [blame] | 2523 | if (!immutable_) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2524 | ReplaceStatePtr(set_state, sampler_state_, dev_data->GetConstCastShared<SAMPLER_STATE>(update->pImageInfo[index].sampler));
Chris Forbes | fea2c54 | 2018-04-13 09:34:15 -0700 | [diff] [blame] | 2525 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2526 | updated = true; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2527 | } |
| 2528 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2529 | void cvdescriptorset::SamplerDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2530 | const Descriptor *src) { |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2531 | updated = true; |
| 2532 | // Mutable descriptors not currently tracked or validated. If copied from mutable, set to mutable to keep from validating. |
| 2533 | if (src->descriptor_class == Mutable) { |
| 2534 | this->descriptor_class = Mutable; |
| 2535 | return; |
| 2536 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2537 | auto *sampler_src = static_cast<const SamplerDescriptor *>(src); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2538 | if (!immutable_) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2539 | ReplaceStatePtr(set_state, sampler_state_, sampler_src->sampler_state_); |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 2540 | } |
| 2541 | } |
| 2542 | |
John Zulauf | d2c3dae | 2019-12-12 11:02:17 -0700 | [diff] [blame] | 2543 | cvdescriptorset::ImageSamplerDescriptor::ImageSamplerDescriptor(const ValidationStateTracker *dev_data, const VkSampler *immut) |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2544 | : ImageDescriptor(ImageSampler), immutable_(false) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2545 | if (immut) { |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 2546 | sampler_state_ = dev_data->GetConstCastShared<SAMPLER_STATE>(*immut); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2547 | immutable_ = true; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2548 | } |
| 2549 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2550 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2551 | void cvdescriptorset::ImageSamplerDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
John Zulauf | d2c3dae | 2019-12-12 11:02:17 -0700 | [diff] [blame] | 2552 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2553 | updated = true; |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2554 | const auto &image_info = update->pImageInfo[index]; |
Chris Forbes | fea2c54 | 2018-04-13 09:34:15 -0700 | [diff] [blame] | 2555 | if (!immutable_) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2556 | ReplaceStatePtr(set_state, sampler_state_, dev_data->GetConstCastShared<SAMPLER_STATE>(image_info.sampler)); |
Chris Forbes | fea2c54 | 2018-04-13 09:34:15 -0700 | [diff] [blame] | 2557 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2558 | image_layout_ = image_info.imageLayout; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2559 | ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView)); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2560 | } |
| 2561 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2562 | void cvdescriptorset::ImageSamplerDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2563 | const Descriptor *src) { |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2564 | updated = true; |
| 2565 | // Mutable descriptors not currently tracked or validated. If copied from mutable, set to mutable to keep from validating. |
| 2566 | if (src->descriptor_class == Mutable) { |
| 2567 | this->descriptor_class = Mutable; |
| 2568 | return; |
| 2569 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2570 | auto *image_src = static_cast<const ImageSamplerDescriptor *>(src); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2571 | if (!immutable_) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2572 | ReplaceStatePtr(set_state, sampler_state_, image_src->sampler_state_); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2573 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2574 | ImageDescriptor::CopyUpdate(set_state, dev_data, src); |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 2575 | } |
| 2576 | |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2577 | cvdescriptorset::ImageDescriptor::ImageDescriptor(const VkDescriptorType type) |
| 2578 | : Descriptor(Image), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {} |
| 2579 | |
| 2580 | cvdescriptorset::ImageDescriptor::ImageDescriptor(DescriptorClass class_) |
| 2581 | : Descriptor(class_), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {} |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2582 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2583 | void cvdescriptorset::ImageDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2584 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2585 | updated = true; |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2586 | const auto &image_info = update->pImageInfo[index]; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2587 | image_layout_ = image_info.imageLayout; |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 2588 | image_view_state_ = dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2589 | } |
| 2590 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2591 | void cvdescriptorset::ImageDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2592 | const Descriptor *src) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2593 | updated = true; |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2594 | // Mutable descriptors not currently tracked or validated. If copied from mutable, set to mutable to keep from validating. |
| 2595 | if (src->descriptor_class == Mutable) { |
| 2596 | this->descriptor_class = Mutable; |
| 2597 | return; |
| 2598 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2599 | auto *image_src = static_cast<const ImageDescriptor *>(src); |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2600 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2601 | image_layout_ = image_src->image_layout_; |
| 2602 | ReplaceStatePtr(set_state, image_view_state_, image_src->image_view_state_); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2603 | } |
| 2604 | |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2605 | void cvdescriptorset::ImageDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) { |
Tobin Ehlis | 81e4637 | 2016-08-17 13:33:44 -0600 | [diff] [blame] | 2606 | // Add binding for image |
Jeff Bolz | faffeb3 | 2019-10-04 12:47:16 -0500 | [diff] [blame] | 2607 | auto iv_state = GetImageViewState(); |
Tobin Ehlis | 8b26a38 | 2016-09-14 08:02:49 -0600 | [diff] [blame] | 2608 | if (iv_state) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2609 | dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *iv_state, image_layout_); |
Jeff Bolz | 148d94e | 2018-12-13 21:25:56 -0600 | [diff] [blame] | 2610 | } |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 2611 | } |
| 2612 | |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2613 | cvdescriptorset::BufferDescriptor::BufferDescriptor(const VkDescriptorType type) |
| 2614 | : Descriptor(GeneralBuffer), offset_(0), range_(0) {} |
| 2615 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2616 | void cvdescriptorset::BufferDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2617 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2618 | updated = true; |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2619 | const auto &buffer_info = update->pBufferInfo[index]; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2620 | offset_ = buffer_info.offset; |
| 2621 | range_ = buffer_info.range; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2622 | ReplaceStatePtr(set_state, buffer_state_, dev_data->GetConstCastShared<BUFFER_STATE>(buffer_info.buffer)); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2623 | } |
| 2624 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2625 | void cvdescriptorset::BufferDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data,
| 2626 | const Descriptor *src) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2627 | updated = true; |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2628 | // Mutable descriptors not currently tracked or validated. If copied from mutable, set to mutable to keep from validating. |
| 2629 | if (src->descriptor_class == Mutable) { |
| 2630 | this->descriptor_class = Mutable; |
| 2631 | return; |
| 2632 | } |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 2633 | const auto buff_desc = static_cast<const BufferDescriptor *>(src); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2634 | offset_ = buff_desc->offset_; |
| 2635 | range_ = buff_desc->range_; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2636 | ReplaceStatePtr(set_state, buffer_state_, buff_desc->buffer_state_); |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 2637 | } |
| 2638 | |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2639 | cvdescriptorset::TexelDescriptor::TexelDescriptor(const VkDescriptorType type) : Descriptor(TexelBuffer) {} |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2640 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2641 | void cvdescriptorset::TexelDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2642 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2643 | updated = true; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2644 | ReplaceStatePtr(set_state, buffer_view_state_, |
| 2645 | dev_data->GetConstCastShared<BUFFER_VIEW_STATE>(update->pTexelBufferView[index])); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2646 | } |
| 2647 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2648 | void cvdescriptorset::TexelDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2649 | const Descriptor *src) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2650 | updated = true; |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2651 | // Mutable descriptors not currently tracked or validated. If copied from mutable, set to mutable to keep from validating. |
| 2652 | if (src->descriptor_class == Mutable) { |
| 2653 | this->descriptor_class = Mutable; |
| 2654 | return; |
| 2655 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2656 | ReplaceStatePtr(set_state, buffer_view_state_, static_cast<const TexelDescriptor *>(src)->buffer_view_state_); |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 2657 | } |
| 2658 | |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2659 | cvdescriptorset::AccelerationStructureDescriptor::AccelerationStructureDescriptor(const VkDescriptorType type) |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2660 | : Descriptor(AccelerationStructure), acc_(VK_NULL_HANDLE), acc_nv_(VK_NULL_HANDLE) { |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2661 | is_khr_ = false; |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2662 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2663 | void cvdescriptorset::AccelerationStructureDescriptor::WriteUpdate(DescriptorSet *set_state, |
| 2664 | const ValidationStateTracker *dev_data, |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2665 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 2666 | const auto *acc_info = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureKHR>(update->pNext); |
| 2667 | const auto *acc_info_nv = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureNV>(update->pNext); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2668 | assert(acc_info || acc_info_nv); |
| 2669 | is_khr_ = (acc_info != NULL); |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2670 | updated = true; |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2671 | if (is_khr_) { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2672 | acc_ = acc_info->pAccelerationStructures[index]; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2673 | ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_)); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2674 | } else { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2675 | acc_nv_ = acc_info_nv->pAccelerationStructures[index]; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2676 | ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_)); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2677 | } |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2678 | } |
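| | // Illustrative sketch (application side, hypothetical handles): acceleration structure descriptors arrive via the
| | // pNext chain rather than pImageInfo/pBufferInfo, which is what WriteUpdate above unpacks:
| | //     VkWriteDescriptorSetAccelerationStructureKHR as_info = {};
| | //     as_info.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR;
| | //     as_info.accelerationStructureCount = 1;
| | //     as_info.pAccelerationStructures = &tlas;   // a top-level (or generic) acceleration structure
| | //     VkWriteDescriptorSet write = {};
| | //     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
| | //     write.pNext = &as_info;
| | //     write.dstSet = set;
| | //     write.dstBinding = 0;
| | //     write.descriptorCount = 1;
| | //     write.descriptorType = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR;
| | //     vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);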
| 2679 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2680 | void cvdescriptorset::AccelerationStructureDescriptor::CopyUpdate(DescriptorSet *set_state, |
| 2681 | const ValidationStateTracker *dev_data, const Descriptor *src) { |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2682 | auto acc_desc = static_cast<const AccelerationStructureDescriptor *>(src); |
| 2683 | updated = true; |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2684 | // Mutable descriptors not currently tracked or validated. If copied from mutable, set to mutable to keep from validating. |
| 2685 | if (src->descriptor_class == Mutable) { |
| 2686 | this->descriptor_class = Mutable; |
| 2687 | return; |
| 2688 | } |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2689 | if (is_khr_) { |
| 2690 | acc_ = acc_desc->acc_; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2691 | ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_)); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2692 | } else { |
| 2693 | acc_nv_ = acc_desc->acc_nv_; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2694 | ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_)); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2695 | } |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2696 | } |
| 2697 | |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2698 | cvdescriptorset::MutableDescriptor::MutableDescriptor() : Descriptor(Mutable) { active_descriptor_class_ = NoDescriptorClass; } |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 2699 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2700 | void cvdescriptorset::MutableDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 2701 | const VkWriteDescriptorSet *update, const uint32_t index) { |
| 2702 | updated = true; |
| 2703 | } |
| 2704 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2705 | void cvdescriptorset::MutableDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2706 | const Descriptor *src) { |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 2707 | updated = true; |
| 2708 | } |
| 2709 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2710 | // This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated |
| 2711 | // sets, and then calls their respective Validate[Write|Copy]Update functions. |
| 2712 | // If the update hits an issue for which the callback returns "true", meaning that the call down the chain should |
| 2713 | // be skipped, then true is returned. |
| 2714 | // If there is no issue with the update, then false is returned. |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 2715 | bool CoreChecks::ValidateUpdateDescriptorSets(uint32_t write_count, const VkWriteDescriptorSet *p_wds, uint32_t copy_count, |
Jeff Bolz | 46c0ea0 | 2019-10-09 13:06:29 -0500 | [diff] [blame] | 2716 | const VkCopyDescriptorSet *p_cds, const char *func_name) const { |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 2717 | bool skip = false; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2718 | // Validate Write updates |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2719 | for (uint32_t i = 0; i < write_count; i++) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2720 | auto dest_set = p_wds[i].dstSet; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 2721 | auto set_node = Get<cvdescriptorset::DescriptorSet>(dest_set); |
Tobin Ehlis | 6a72dc7 | 2016-06-01 16:41:17 -0600 | [diff] [blame] | 2722 | if (!set_node) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 2723 | skip |= LogError(dest_set, kVUID_Core_DrawState_InvalidDescriptorSet, |
| 2724 | "Cannot call %s on %s that has not been allocated in pDescriptorWrites[%u].", func_name, |
| 2725 | report_data->FormatHandle(dest_set).c_str(), i); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2726 | } else { |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 2727 | std::string error_code; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2728 | std::string error_str; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 2729 | if (!ValidateWriteUpdate(set_node.get(), &p_wds[i], func_name, &error_code, &error_str, false)) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 2730 | skip |= |
| 2731 | LogError(dest_set, error_code, "%s pDescriptorWrites[%u] failed write update validation for %s with error: %s.", |
| 2732 | func_name, i, report_data->FormatHandle(dest_set).c_str(), error_str.c_str()); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2733 | } |
| 2734 | } |
sourav parmar | a24fb7b | 2020-05-26 10:50:04 -0700 | [diff] [blame] | 2735 | if (p_wds[i].pNext) { |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 2736 | const auto *pnext_struct = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureKHR>(p_wds[i].pNext); |
Mark Lobodzinski | 17dc460 | 2020-05-29 07:48:40 -0600 | [diff] [blame] | 2737 | if (pnext_struct) { |
| 2738 | for (uint32_t j = 0; j < pnext_struct->accelerationStructureCount; ++j) { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 2739 | const auto as_state = Get<ACCELERATION_STRUCTURE_STATE_KHR>(pnext_struct->pAccelerationStructures[j]); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2740 | if (as_state && (as_state->create_infoKHR.sType == VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR && |
sourav parmar | 766e2a7 | 2020-12-03 16:17:11 -0800 | [diff] [blame] | 2741 | (as_state->create_infoKHR.type != VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR && |
| 2742 | as_state->create_infoKHR.type != VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR))) { |
sourav parmar | a24fb7b | 2020-05-26 10:50:04 -0700 | [diff] [blame] | 2743 | skip |= |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2744 | LogError(dest_set, "VUID-VkWriteDescriptorSetAccelerationStructureKHR-pAccelerationStructures-03579", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 2745 | "%s: For pDescriptorWrites[%u] acceleration structure in pAccelerationStructures[%u] must " |
| 2746 | "have been created with " |
sourav parmar | bcee751 | 2020-12-28 14:34:49 -0800 | [diff] [blame] | 2747 | "VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR or VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR.", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 2748 | func_name, i, j); |
sourav parmar | a24fb7b | 2020-05-26 10:50:04 -0700 | [diff] [blame] | 2749 | } |
| 2750 | } |
| 2751 | } |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 2752 | const auto *pnext_struct_nv = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureNV>(p_wds[i].pNext); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2753 | if (pnext_struct_nv) { |
| 2754 | for (uint32_t j = 0; j < pnext_struct_nv->accelerationStructureCount; ++j) { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 2755 | const auto as_state = Get<ACCELERATION_STRUCTURE_STATE>(pnext_struct_nv->pAccelerationStructures[j]); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2756 | if (as_state && (as_state->create_infoNV.sType == VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV && |
| 2757 | as_state->create_infoNV.info.type != VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV)) { |
| 2758 | skip |= LogError(dest_set, "VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-03748", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 2759 | "%s: For pDescriptorWrites[%u] acceleration structure in pAccelerationStructures[%u] must " |
| 2760 | "have been created with" |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2761 | " VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV.", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 2762 | func_name, i, j); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2763 | } |
| 2764 | } |
| 2765 | } |
sourav parmar | a24fb7b | 2020-05-26 10:50:04 -0700 | [diff] [blame] | 2766 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2767 | } |
| 2768 | // Now validate copy updates |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2769 | for (uint32_t i = 0; i < copy_count; ++i) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2770 | auto dst_set = p_cds[i].dstSet; |
| 2771 | auto src_set = p_cds[i].srcSet; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 2772 | auto src_node = Get<cvdescriptorset::DescriptorSet>(src_set); |
| 2773 | auto dst_node = Get<cvdescriptorset::DescriptorSet>(dst_set); |
Tobin Ehlis | a171275 | 2017-01-04 09:41:47 -0700 | [diff] [blame] | 2774 | // Object_tracker verifies that src & dst descriptor sets are valid
| 2775 | assert(src_node); |
| 2776 | assert(dst_node); |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 2777 | std::string error_code; |
Tobin Ehlis | a171275 | 2017-01-04 09:41:47 -0700 | [diff] [blame] | 2778 | std::string error_str; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 2779 | if (!ValidateCopyUpdate(&p_cds[i], dst_node.get(), src_node.get(), func_name, &error_code, &error_str)) { |
Mark Lobodzinski | 9d38ea2 | 2020-03-16 18:22:16 -0600 | [diff] [blame] | 2780 | LogObjectList objlist(dst_set); |
| 2781 | objlist.add(src_set); |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 2782 | skip |= LogError(objlist, error_code, "%s pDescriptorCopies[%u] failed copy update from %s to %s with error: %s.", |
| 2783 | func_name, i, report_data->FormatHandle(src_set).c_str(), report_data->FormatHandle(dst_set).c_str(), |
| 2784 | error_str.c_str()); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2785 | } |
| 2786 | } |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 2787 | return skip; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2788 | } |
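// Illustrative sketch (not part of the validation layer): the kind of application-side
// vkUpdateDescriptorSets() call whose write array the validation above inspects. The handles
// passed in are assumed to be valid, and binding 0 of the set's layout is assumed to be a
// uniform buffer; names here are hypothetical.
static void ExampleAppWriteUpdate(VkDevice device, VkDescriptorSet set, VkBuffer uniform_buffer) {
    VkDescriptorBufferInfo buffer_info = {};
    buffer_info.buffer = uniform_buffer;
    buffer_info.offset = 0;
    buffer_info.range = VK_WHOLE_SIZE;

    VkWriteDescriptorSet write = {};
    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.dstSet = set;  // must be an allocated, valid descriptor set
    write.dstBinding = 0;
    write.dstArrayElement = 0;
    write.descriptorCount = 1;
    write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    write.pBufferInfo = &buffer_info;

    // One write, no copies; ValidateUpdateDescriptorSets() walks these arrays before the call
    // continues down the chain.
    vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);
}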
| 2789 | // This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated |
| 2790 | // sets, and then calls their respective Perform[Write|Copy]Update functions. |
| 2791 | // Prerequisite : ValidateUpdateDescriptorSets() should be called and return "false" prior to calling PerformUpdateDescriptorSets() |
| 2792 | // with the same set of updates. |
| 2793 | // This is split from the validate code to allow validation prior to calling down the chain, and then update after |
| 2794 | // calling down the chain. |
John Zulauf | e3b35f3 | 2019-06-25 14:21:21 -0600 | [diff] [blame] | 2795 | void cvdescriptorset::PerformUpdateDescriptorSets(ValidationStateTracker *dev_data, uint32_t write_count, |
| 2796 | const VkWriteDescriptorSet *p_wds, uint32_t copy_count, |
| 2797 | const VkCopyDescriptorSet *p_cds) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2798 | // Write updates first |
| 2799 | uint32_t i = 0; |
| 2800 | for (i = 0; i < write_count; ++i) { |
| 2801 | auto dest_set = p_wds[i].dstSet; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 2802 | auto set_node = dev_data->Get<cvdescriptorset::DescriptorSet>(dest_set); |
Tobin Ehlis | 6a72dc7 | 2016-06-01 16:41:17 -0600 | [diff] [blame] | 2803 | if (set_node) { |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 2804 | set_node->PerformWriteUpdate(dev_data, &p_wds[i]); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2805 | } |
| 2806 | } |
| 2807 | // Now copy updates |
| 2808 | for (i = 0; i < copy_count; ++i) { |
| 2809 | auto dst_set = p_cds[i].dstSet; |
| 2810 | auto src_set = p_cds[i].srcSet; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 2811 | auto src_node = dev_data->Get<cvdescriptorset::DescriptorSet>(src_set); |
| 2812 | auto dst_node = dev_data->Get<cvdescriptorset::DescriptorSet>(dst_set); |
Tobin Ehlis | 6a72dc7 | 2016-06-01 16:41:17 -0600 | [diff] [blame] | 2813 | if (src_node && dst_node) { |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 2814 | dst_node->PerformCopyUpdate(dev_data, &p_cds[i], src_node.get()); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2815 | } |
| 2816 | } |
| 2817 | } |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 2818 | |
John Zulauf | e3b35f3 | 2019-06-25 14:21:21 -0600 | [diff] [blame] | 2819 | cvdescriptorset::DecodedTemplateUpdate::DecodedTemplateUpdate(const ValidationStateTracker *device_data, |
Jeremy Gebben | fc89045 | 2021-10-27 10:56:49 -0600 | [diff] [blame] | 2820 | VkDescriptorSet descriptorSet, |
| 2821 | const UPDATE_TEMPLATE_STATE *template_state, const void *pData, |
| 2822 | VkDescriptorSetLayout push_layout) { |
John Zulauf | b845eb2 | 2018-10-12 11:41:06 -0600 | [diff] [blame] | 2823 | auto const &create_info = template_state->create_info; |
| 2824 | inline_infos.resize(create_info.descriptorUpdateEntryCount); // Make sure we have one if we need it |
sourav parmar | 480d277 | 2021-01-24 22:24:54 -0800 | [diff] [blame] | 2825 | inline_infos_khr.resize(create_info.descriptorUpdateEntryCount); |
| 2826 | inline_infos_nv.resize(create_info.descriptorUpdateEntryCount); |
John Zulauf | b845eb2 | 2018-10-12 11:41:06 -0600 | [diff] [blame] | 2827 | desc_writes.reserve(create_info.descriptorUpdateEntryCount); // emplaced, so reserved without initialization |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 2828 | VkDescriptorSetLayout effective_dsl = create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET |
| 2829 | ? create_info.descriptorSetLayout |
| 2830 | : push_layout; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 2831 | auto layout_obj = device_data->Get<cvdescriptorset::DescriptorSetLayout>(effective_dsl); |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 2832 | |
| 2833 | // Create a WriteDescriptorSet struct for each template update entry |
| 2834 | for (uint32_t i = 0; i < create_info.descriptorUpdateEntryCount; i++) { |
| 2835 | auto binding_count = layout_obj->GetDescriptorCountFromBinding(create_info.pDescriptorUpdateEntries[i].dstBinding); |
| 2836 | auto binding_being_updated = create_info.pDescriptorUpdateEntries[i].dstBinding; |
| 2837 | auto dst_array_element = create_info.pDescriptorUpdateEntries[i].dstArrayElement; |
| 2838 | |
John Zulauf | b6d7120 | 2017-12-22 16:47:09 -0700 | [diff] [blame] | 2839 | desc_writes.reserve(desc_writes.size() + create_info.pDescriptorUpdateEntries[i].descriptorCount); |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 2840 | for (uint32_t j = 0; j < create_info.pDescriptorUpdateEntries[i].descriptorCount; j++) { |
| 2841 | desc_writes.emplace_back(); |
| 2842 | auto &write_entry = desc_writes.back(); |
| 2843 | |
| 2844 | size_t offset = create_info.pDescriptorUpdateEntries[i].offset + j * create_info.pDescriptorUpdateEntries[i].stride; |
| 2845 | char *update_entry = (char *)(pData) + offset; |
| 2846 | |
| 2847 | if (dst_array_element >= binding_count) { |
| 2848 | dst_array_element = 0; |
Mark Lobodzinski | 4aa479d | 2017-03-10 09:14:00 -0700 | [diff] [blame] | 2849 | binding_being_updated = layout_obj->GetNextValidBinding(binding_being_updated); |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 2850 | } |
| 2851 | |
| 2852 | write_entry.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; |
| 2853 | write_entry.pNext = NULL; |
| 2854 | write_entry.dstSet = descriptorSet; |
| 2855 | write_entry.dstBinding = binding_being_updated; |
| 2856 | write_entry.dstArrayElement = dst_array_element; |
| 2857 | write_entry.descriptorCount = 1; |
| 2858 | write_entry.descriptorType = create_info.pDescriptorUpdateEntries[i].descriptorType; |
| 2859 | |
| 2860 | switch (create_info.pDescriptorUpdateEntries[i].descriptorType) { |
| 2861 | case VK_DESCRIPTOR_TYPE_SAMPLER: |
| 2862 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: |
| 2863 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
| 2864 | case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: |
| 2865 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: |
| 2866 | write_entry.pImageInfo = reinterpret_cast<VkDescriptorImageInfo *>(update_entry); |
| 2867 | break; |
| 2868 | |
| 2869 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: |
| 2870 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: |
| 2871 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: |
| 2872 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: |
| 2873 | write_entry.pBufferInfo = reinterpret_cast<VkDescriptorBufferInfo *>(update_entry); |
| 2874 | break; |
| 2875 | |
| 2876 | case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: |
| 2877 | case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: |
| 2878 | write_entry.pTexelBufferView = reinterpret_cast<VkBufferView *>(update_entry); |
| 2879 | break; |
Dave Houlton | 142c4cb | 2018-10-17 15:04:41 -0600 | [diff] [blame] | 2880 | case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: { |
| 2881 | VkWriteDescriptorSetInlineUniformBlockEXT *inline_info = &inline_infos[i]; |
| 2882 | inline_info->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT; |
| 2883 | inline_info->pNext = nullptr; |
| 2884 | inline_info->dataSize = create_info.pDescriptorUpdateEntries[i].descriptorCount; |
| 2885 | inline_info->pData = update_entry; |
| 2886 | write_entry.pNext = inline_info; |
Ricardo Garcia | fee1573 | 2019-05-28 11:13:31 +0200 | [diff] [blame] | 2887 | // descriptorCount must match the dataSize member of the VkWriteDescriptorSetInlineUniformBlockEXT structure |
| 2888 | write_entry.descriptorCount = inline_info->dataSize; |
Dave Houlton | 142c4cb | 2018-10-17 15:04:41 -0600 | [diff] [blame] | 2889 | // skip the rest of the array, they just represent bytes in the update |
| 2890 | j = create_info.pDescriptorUpdateEntries[i].descriptorCount; |
| 2891 | break; |
| 2892 | } |
sourav parmar | 480d277 | 2021-01-24 22:24:54 -0800 | [diff] [blame] | 2893 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: { |
| 2894 | VkWriteDescriptorSetAccelerationStructureKHR *inline_info_khr = &inline_infos_khr[i]; |
| 2895 | inline_info_khr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR; |
| 2896 | inline_info_khr->pNext = nullptr; |
| 2897 | inline_info_khr->accelerationStructureCount = create_info.pDescriptorUpdateEntries[i].descriptorCount; |
| 2898 | inline_info_khr->pAccelerationStructures = reinterpret_cast<VkAccelerationStructureKHR *>(update_entry); |
| 2899 | write_entry.pNext = inline_info_khr; |
| 2900 | break; |
| 2901 | } |
| 2902 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV: { |
| 2903 | VkWriteDescriptorSetAccelerationStructureNV *inline_info_nv = &inline_infos_nv[i]; |
| 2904 | inline_info_nv->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV; |
| 2905 | inline_info_nv->pNext = nullptr; |
| 2906 | inline_info_nv->accelerationStructureCount = create_info.pDescriptorUpdateEntries[i].descriptorCount; |
| 2907 | inline_info_nv->pAccelerationStructures = reinterpret_cast<VkAccelerationStructureNV *>(update_entry); |
| 2908 | write_entry.pNext = inline_info_nv; |
| 2909 | break; |
| 2910 | } |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 2911 | default: |
| 2912 | assert(0); |
| 2913 | break; |
| 2914 | } |
| 2915 | dst_array_element++; |
| 2916 | } |
| 2917 | } |
John Zulauf | b845eb2 | 2018-10-12 11:41:06 -0600 | [diff] [blame] | 2918 | } |
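// Illustrative sketch (not part of the validation layer): an application-side update template of
// the kind the constructor above decodes into plain VkWriteDescriptorSet entries. The struct
// layout and the set layout passed in are assumptions for the example; error handling is omitted.
struct ExampleTemplateData {
    VkDescriptorImageInfo image;    // consumed by binding 0
    VkDescriptorBufferInfo buffer;  // consumed by binding 1
};

static void ExampleUpdateWithTemplate(VkDevice device, VkDescriptorSet set, VkDescriptorSetLayout layout,
                                      const ExampleTemplateData &data) {
    VkDescriptorUpdateTemplateEntry entries[2] = {};
    entries[0].dstBinding = 0;
    entries[0].descriptorCount = 1;
    entries[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    entries[0].offset = offsetof(ExampleTemplateData, image);
    entries[0].stride = sizeof(ExampleTemplateData);

    entries[1].dstBinding = 1;
    entries[1].descriptorCount = 1;
    entries[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    entries[1].offset = offsetof(ExampleTemplateData, buffer);
    entries[1].stride = sizeof(ExampleTemplateData);

    VkDescriptorUpdateTemplateCreateInfo info = {};
    info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
    info.descriptorUpdateEntryCount = 2;
    info.pDescriptorUpdateEntries = entries;
    info.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET;
    info.descriptorSetLayout = layout;

    VkDescriptorUpdateTemplate update_template = VK_NULL_HANDLE;
    vkCreateDescriptorUpdateTemplate(device, &info, nullptr, &update_template);

    // The constructor above applies offset + j * stride to pData to find each
    // VkDescriptorImageInfo / VkDescriptorBufferInfo referenced by the template entries.
    vkUpdateDescriptorSetWithTemplate(device, set, update_template, &data);
    vkDestroyDescriptorUpdateTemplate(device, update_template, nullptr);
}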
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 2919 | // These helper functions carry out the validation and recording of descriptor updates performed via update templates. They decode
| 2920 | // the templatized data and leverage the non-template UpdateDescriptor helper functions. |
Jeremy Gebben | fc89045 | 2021-10-27 10:56:49 -0600 | [diff] [blame] | 2921 | bool CoreChecks::ValidateUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet, |
| 2922 | const UPDATE_TEMPLATE_STATE *template_state, const void *pData) const { |
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 2923 | // Translate the templated update into a normal update for validation... |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 2924 | cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData); |
| 2925 | return ValidateUpdateDescriptorSets(static_cast<uint32_t>(decoded_update.desc_writes.size()), decoded_update.desc_writes.data(), |
| 2926 | 0, NULL, "vkUpdateDescriptorSetWithTemplate()"); |
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 2927 | } |
John Zulauf | b845eb2 | 2018-10-12 11:41:06 -0600 | [diff] [blame] | 2928 | |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 2929 | std::string cvdescriptorset::DescriptorSet::StringifySetAndLayout() const { |
| 2930 | std::string out; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2931 | auto layout_handle = layout_->GetDescriptorSetLayout(); |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 2932 | if (IsPushDescriptor()) { |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 2933 | std::ostringstream str; |
Tony-LunarG | 1d3ee2d | 2020-10-27 15:54:52 -0600 | [diff] [blame] | 2934 | str << "Push Descriptors defined with " << state_data_->report_data->FormatHandle(layout_handle); |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 2935 | out = str.str(); |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 2936 | } else { |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 2937 | std::ostringstream str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 2938 | str << state_data_->report_data->FormatHandle(GetSet()) << " allocated with " |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 2939 | << state_data_->report_data->FormatHandle(layout_handle); |
| 2940 | out = str.str(); |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 2941 | } |
| 2942 | return out; |
| 2943 | }
| 2944 | |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 2945 | // Loop through the write updates to validate for a push descriptor set, ignoring dstSet |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2946 | bool CoreChecks::ValidatePushDescriptorsUpdate(const DescriptorSet *push_set, uint32_t write_count, |
John Zulauf | bd9b341 | 2019-08-22 17:16:11 -0600 | [diff] [blame] | 2947 | const VkWriteDescriptorSet *p_wds, const char *func_name) const { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 2948 | assert(push_set->IsPushDescriptor()); |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 2949 | bool skip = false; |
| 2950 | for (uint32_t i = 0; i < write_count; i++) { |
| 2951 | std::string error_code; |
| 2952 | std::string error_str; |
ziga-lunarg | 6c46b24 | 2021-09-13 18:33:37 +0200 | [diff] [blame] | 2953 | if (!ValidateWriteUpdate(push_set, &p_wds[i], func_name, &error_code, &error_str, true)) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 2954 | skip |= LogError(push_set->GetDescriptorSetLayout(), error_code, |
| 2955 | "%s VkWriteDescriptorSet[%u] failed update validation: %s.", func_name, i, error_str.c_str()); |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 2956 | } |
| 2957 | } |
| 2958 | return skip; |
| 2959 | } |
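// Illustrative sketch (not part of the validation layer): the push-descriptor path validated
// above, where dstSet is ignored and the writes are recorded directly into the command buffer.
// Assumes the pipeline layout was created with a PUSH_DESCRIPTOR set layout at set index 0 and
// that VK_KHR_push_descriptor is enabled; names are hypothetical.
static void ExamplePushDescriptors(VkCommandBuffer cb, VkPipelineLayout pipeline_layout,
                                   const VkDescriptorImageInfo &image_info) {
    VkWriteDescriptorSet write = {};
    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.dstSet = VK_NULL_HANDLE;  // ignored for push descriptors
    write.dstBinding = 0;
    write.descriptorCount = 1;
    write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    write.pImageInfo = &image_info;

    vkCmdPushDescriptorSetKHR(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout, 0 /*set*/, 1, &write);
}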
| 2960 | |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 2961 | // For the given buffer, verify that its creation parameters are appropriate for the given type |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2962 | // If there's an error, update the error_msg string with details and return false, else return true |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 2963 | bool cvdescriptorset::ValidateBufferUsage(debug_report_data *report_data, BUFFER_STATE const *buffer_node, VkDescriptorType type, |
| 2964 | std::string *error_code, std::string *error_msg) { |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 2965 | // Verify that the usage bits are set correctly for the given type
Tobin Ehlis | 94bc5d2 | 2016-06-02 07:46:52 -0600 | [diff] [blame] | 2966 | auto usage = buffer_node->createInfo.usage; |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 2967 | const char *error_usage_bit = nullptr; |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 2968 | switch (type) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2969 | case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: |
| 2970 | if (!(usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 2971 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00334"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2972 | error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT"; |
| 2973 | } |
| 2974 | break; |
| 2975 | case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: |
| 2976 | if (!(usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 2977 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00335"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2978 | error_usage_bit = "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT"; |
| 2979 | } |
| 2980 | break; |
| 2981 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: |
| 2982 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: |
| 2983 | if (!(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 2984 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00330"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2985 | error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT"; |
| 2986 | } |
| 2987 | break; |
| 2988 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: |
| 2989 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: |
| 2990 | if (!(usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 2991 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00331"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2992 | error_usage_bit = "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT"; |
| 2993 | } |
| 2994 | break; |
| 2995 | default: |
| 2996 | break; |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 2997 | } |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 2998 | if (error_usage_bit) { |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 2999 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3000 | error_str << "Buffer (" << report_data->FormatHandle(buffer_node->buffer()) << ") with usage mask " << std::hex |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3001 | << std::showbase << usage << " being used for a descriptor update of type " << string_VkDescriptorType(type) |
| 3002 | << " does not have " << error_usage_bit << " set."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3003 | *error_msg = error_str.str(); |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 3004 | return false; |
| 3005 | } |
| 3006 | return true; |
| 3007 | } |
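// Illustrative sketch (not part of the validation layer): creating a buffer whose usage flags
// satisfy the check above for VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER updates. Size and sharing mode
// are arbitrary example values; error handling is omitted.
static VkBuffer ExampleCreateUniformBuffer(VkDevice device) {
    VkBufferCreateInfo info = {};
    info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    info.size = 256;
    info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;  // required for uniform buffer descriptor updates
    info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    VkBuffer buffer = VK_NULL_HANDLE;
    vkCreateBuffer(device, &info, nullptr, &buffer);
    return buffer;
}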
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3008 | // For buffer descriptor updates, verify the buffer usage and VkDescriptorBufferInfo struct which includes: |
| 3009 | // 1. buffer is valid |
| 3010 | // 2. buffer was created with correct usage flags |
| 3011 | // 3. offset is less than buffer size |
| 3012 | // 4. range is either VK_WHOLE_SIZE or falls in (0, (buffer size - offset)] |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3013 | // 5. range and offset are within the device's limits |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3014 | // If there's an error, update the error_msg string with details and return false, else return true |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3015 | bool CoreChecks::ValidateBufferUpdate(VkDescriptorBufferInfo const *buffer_info, VkDescriptorType type, const char *func_name, |
John Zulauf | bd9b341 | 2019-08-22 17:16:11 -0600 | [diff] [blame] | 3016 | std::string *error_code, std::string *error_msg) const { |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3017 | // First make sure that buffer is valid |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3018 | auto buffer_node = Get<BUFFER_STATE>(buffer_info->buffer); |
Tobin Ehlis | fa8b618 | 2016-12-22 13:40:45 -0700 | [diff] [blame] | 3019 | // Any invalid buffer should already be caught by object_tracker |
| 3020 | assert(buffer_node); |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 3021 | if (ValidateMemoryIsBoundToBuffer(buffer_node.get(), func_name, "VUID-VkWriteDescriptorSet-descriptorType-00329")) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3022 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00329"; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3023 | *error_msg = "No memory bound to buffer."; |
Tobin Ehlis | 8128096 | 2016-07-20 14:04:20 -0600 | [diff] [blame] | 3024 | return false; |
Tobin Ehlis | fed999f | 2016-09-21 15:09:45 -0600 | [diff] [blame] | 3025 | } |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3026 | // Verify usage bits |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 3027 | if (!cvdescriptorset::ValidateBufferUsage(report_data, buffer_node.get(), type, error_code, error_msg)) { |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3028 | // error_msg will have been updated by ValidateBufferUsage() |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3029 | return false; |
| 3030 | } |
| 3031 | // offset must be less than buffer size |
Jeremy Hayes | d1a6a82 | 2017-03-09 14:39:45 -0700 | [diff] [blame] | 3032 | if (buffer_info->offset >= buffer_node->createInfo.size) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3033 | *error_code = "VUID-VkDescriptorBufferInfo-offset-00340"; |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3034 | std::stringstream error_str; |
Jeremy Hayes | d1a6a82 | 2017-03-09 14:39:45 -0700 | [diff] [blame] | 3035 | error_str << "VkDescriptorBufferInfo offset of " << buffer_info->offset << " is greater than or equal to buffer " |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3036 | << report_data->FormatHandle(buffer_node->buffer()) << " size of " << buffer_node->createInfo.size; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3037 | *error_msg = error_str.str(); |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3038 | return false; |
| 3039 | } |
| 3040 | if (buffer_info->range != VK_WHOLE_SIZE) { |
| 3041 | // Range must be VK_WHOLE_SIZE or > 0 |
| 3042 | if (!buffer_info->range) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3043 | *error_code = "VUID-VkDescriptorBufferInfo-range-00341"; |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3044 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3045 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3046 | << " VkDescriptorBufferInfo range is not VK_WHOLE_SIZE and is zero, which is not allowed."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3047 | *error_msg = error_str.str(); |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3048 | return false; |
| 3049 | } |
| 3050 | // Range must be VK_WHOLE_SIZE or <= (buffer size - offset) |
| 3051 | if (buffer_info->range > (buffer_node->createInfo.size - buffer_info->offset)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3052 | *error_code = "VUID-VkDescriptorBufferInfo-range-00342"; |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3053 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3054 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) << " VkDescriptorBufferInfo range is " |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3055 | << buffer_info->range << " which is greater than buffer size (" << buffer_node->createInfo.size |
| 3056 | << ") minus requested offset of " << buffer_info->offset; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3057 | *error_msg = error_str.str(); |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3058 | return false; |
| 3059 | } |
| 3060 | } |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3061 | // Check buffer update sizes against device limits |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3062 | const auto &limits = phys_dev_props.limits; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3063 | if (VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type || VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC == type) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 3064 | auto max_ub_range = limits.maxUniformBufferRange; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3065 | if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_ub_range) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3066 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00332"; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3067 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3068 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) << " VkDescriptorBufferInfo range is " |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3069 | << buffer_info->range << " which is greater than this device's maxUniformBufferRange (" << max_ub_range |
| 3070 | << ")"; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3071 | *error_msg = error_str.str(); |
| 3072 | return false; |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 3073 | } else if (buffer_info->range == VK_WHOLE_SIZE && (buffer_node->createInfo.size - buffer_info->offset) > max_ub_range) { |
| 3074 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00332"; |
| 3075 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3076 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3077 | << " VkDescriptorBufferInfo range is VK_WHOLE_SIZE but effective range " |
Peter Kohaut | 18f413d | 2018-07-16 13:15:42 +0200 | [diff] [blame] | 3078 | << "(" << (buffer_node->createInfo.size - buffer_info->offset) << ") is greater than this device's " |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 3079 | << "maxUniformBufferRange (" << max_ub_range << ")"; |
| 3080 | *error_msg = error_str.str(); |
| 3081 | return false; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3082 | } |
| 3083 | } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER == type || VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC == type) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 3084 | auto max_sb_range = limits.maxStorageBufferRange; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3085 | if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_sb_range) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3086 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00333"; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3087 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3088 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) << " VkDescriptorBufferInfo range is " |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3089 | << buffer_info->range << " which is greater than this device's maxStorageBufferRange (" << max_sb_range |
| 3090 | << ")"; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3091 | *error_msg = error_str.str(); |
| 3092 | return false; |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 3093 | } else if (buffer_info->range == VK_WHOLE_SIZE && (buffer_node->createInfo.size - buffer_info->offset) > max_sb_range) { |
| 3094 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00333"; |
| 3095 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3096 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3097 | << " VkDescriptorBufferInfo range is VK_WHOLE_SIZE but effective range " |
Peter Kohaut | 18f413d | 2018-07-16 13:15:42 +0200 | [diff] [blame] | 3098 | << "(" << (buffer_node->createInfo.size - buffer_info->offset) << ") is greater than this device's " |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 3099 | << "maxStorageBufferRange (" << max_sb_range << ")"; |
| 3100 | *error_msg = error_str.str(); |
| 3101 | return false; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3102 | } |
| 3103 | } |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3104 | return true; |
| 3105 | } |
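// Minimal sketch (not part of the validation layer) restating the VkDescriptorBufferInfo bounds
// rules checked above: offset must be less than the buffer size, and range must be VK_WHOLE_SIZE
// or fall in (0, size - offset]. The device range limits (maxUniformBufferRange /
// maxStorageBufferRange) are checked separately.
static bool ExampleBufferInfoInBounds(const VkDescriptorBufferInfo &info, VkDeviceSize buffer_size) {
    if (info.offset >= buffer_size) return false;
    if (info.range == VK_WHOLE_SIZE) return true;
    return (info.range > 0) && (info.range <= (buffer_size - info.offset));
}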
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 3106 | template <typename T> |
| 3107 | bool CoreChecks::ValidateAccelerationStructureUpdate(T acc_node, const char *func_name, std::string *error_code, |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 3108 | std::string *error_msg) const { |
ziga-lunarg | 5f3a091 | 2021-11-06 19:18:45 +0100 | [diff] [blame] | 3109 | // nullDescriptor feature allows this to be VK_NULL_HANDLE |
| 3110 | if (acc_node) { |
| 3111 | if (ValidateMemoryIsBoundToAccelerationStructure(acc_node, func_name, kVUIDUndefined)) { |
| 3112 | *error_code = kVUIDUndefined; |
| 3113 | *error_msg = "No memory bound to acceleration structure."; |
| 3114 | return false; |
| 3115 | } |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 3116 | } |
| 3117 | return true; |
| 3118 | } |
| 3119 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3120 | // Verify that the contents of the update are ok, but don't perform actual update |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3121 | bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, const DescriptorSet *src_set, |
| 3122 | VkDescriptorType src_type, uint32_t src_index, const DescriptorSet *dst_set, |
| 3123 | VkDescriptorType dst_type, uint32_t dst_index, const char *func_name, |
| 3124 | std::string *error_code, std::string *error_msg) const { |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3125 | // Note : Repurposing some Write update error codes here as specific details aren't called out for copy updates like they are |
| 3126 | // for write updates |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3127 | using DescriptorClass = cvdescriptorset::DescriptorClass; |
| 3128 | using BufferDescriptor = cvdescriptorset::BufferDescriptor; |
| 3129 | using ImageDescriptor = cvdescriptorset::ImageDescriptor; |
| 3130 | using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor; |
| 3131 | using SamplerDescriptor = cvdescriptorset::SamplerDescriptor; |
| 3132 | using TexelDescriptor = cvdescriptorset::TexelDescriptor; |
| 3133 | |
| 3134 | auto device_data = this; |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3135 | |
| 3136 | if (dst_type == VK_DESCRIPTOR_TYPE_SAMPLER) { |
| 3137 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 3138 | const auto dst_desc = dst_set->GetDescriptorFromGlobalIndex(dst_index + di); |
| 3139 | if (!dst_desc->updated) continue; |
| 3140 | if (dst_desc->IsImmutableSampler()) { |
| 3141 | *error_code = "VUID-VkCopyDescriptorSet-dstBinding-02753"; |
| 3142 | std::stringstream error_str; |
| 3143 | error_str << "Attempted copy update to an immutable sampler descriptor."; |
| 3144 | *error_msg = error_str.str(); |
| 3145 | return false; |
| 3146 | } |
| 3147 | } |
| 3148 | } |
| 3149 | |
| 3150 | switch (src_set->GetDescriptorFromGlobalIndex(src_index)->descriptor_class) { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3151 | case DescriptorClass::PlainSampler: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3152 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3153 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(src_index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 3154 | if (!src_desc->updated) continue; |
| 3155 | if (!src_desc->IsImmutableSampler()) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 3156 | auto update_sampler = static_cast<const SamplerDescriptor *>(src_desc)->GetSampler(); |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3157 | if (!ValidateSampler(update_sampler)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3158 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3159 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3160 | error_str << "Attempted copy update to sampler descriptor with invalid sampler: " |
| 3161 | << report_data->FormatHandle(update_sampler) << "."; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3162 | *error_msg = error_str.str(); |
| 3163 | return false; |
| 3164 | } |
| 3165 | } else { |
| 3166 | // TODO : Warn here |
| 3167 | } |
| 3168 | } |
| 3169 | break; |
| 3170 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3171 | case DescriptorClass::ImageSampler: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3172 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3173 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(src_index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 3174 | if (!src_desc->updated) continue; |
| 3175 | auto img_samp_desc = static_cast<const ImageSamplerDescriptor *>(src_desc); |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3176 | // First validate sampler |
| 3177 | if (!img_samp_desc->IsImmutableSampler()) { |
| 3178 | auto update_sampler = img_samp_desc->GetSampler(); |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3179 | if (!ValidateSampler(update_sampler)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3180 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3181 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3182 | error_str << "Attempted copy update to sampler descriptor with invalid sampler: " |
| 3183 | << report_data->FormatHandle(update_sampler) << "."; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3184 | *error_msg = error_str.str(); |
| 3185 | return false; |
| 3186 | } |
| 3187 | } else { |
| 3188 | // TODO : Warn here |
| 3189 | } |
| 3190 | // Validate image |
| 3191 | auto image_view = img_samp_desc->GetImageView(); |
| 3192 | auto image_layout = img_samp_desc->GetImageLayout(); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3193 | if (image_view) { |
| 3194 | if (!ValidateImageUpdate(image_view, image_layout, src_type, func_name, error_code, error_msg)) { |
| 3195 | std::stringstream error_str; |
| 3196 | error_str << "Attempted copy update to combined image sampler descriptor failed due to: " |
| 3197 | << error_msg->c_str(); |
| 3198 | *error_msg = error_str.str(); |
| 3199 | return false; |
| 3200 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3201 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3202 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3203 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3204 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3205 | case DescriptorClass::Image: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3206 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3207 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(src_index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 3208 | if (!src_desc->updated) continue; |
| 3209 | auto img_desc = static_cast<const ImageDescriptor *>(src_desc); |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3210 | auto image_view = img_desc->GetImageView(); |
| 3211 | auto image_layout = img_desc->GetImageLayout(); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3212 | if (image_view) { |
| 3213 | if (!ValidateImageUpdate(image_view, image_layout, src_type, func_name, error_code, error_msg)) { |
| 3214 | std::stringstream error_str; |
| 3215 | error_str << "Attempted copy update to image descriptor failed due to: " << error_msg->c_str(); |
| 3216 | *error_msg = error_str.str(); |
| 3217 | return false; |
| 3218 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3219 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3220 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3221 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3222 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3223 | case DescriptorClass::TexelBuffer: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3224 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3225 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(src_index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 3226 | if (!src_desc->updated) continue; |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 3227 | auto buffer_view = static_cast<const TexelDescriptor *>(src_desc)->GetBufferView(); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3228 | if (buffer_view) { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3229 | auto bv_state = device_data->Get<BUFFER_VIEW_STATE>(buffer_view); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3230 | if (!bv_state) { |
| 3231 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02994"; |
| 3232 | std::stringstream error_str; |
| 3233 | error_str << "Attempted copy update to texel buffer descriptor with invalid buffer view: " |
| 3234 | << report_data->FormatHandle(buffer_view); |
| 3235 | *error_msg = error_str.str(); |
| 3236 | return false; |
| 3237 | } |
| 3238 | auto buffer = bv_state->create_info.buffer; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 3239 | auto buffer_state = Get<BUFFER_STATE>(buffer); |
| 3240 | if (!cvdescriptorset::ValidateBufferUsage(report_data, buffer_state.get(), src_type, error_code, error_msg)) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3241 | std::stringstream error_str; |
| 3242 | error_str << "Attempted copy update to texel buffer descriptor failed due to: " << error_msg->c_str(); |
| 3243 | *error_msg = error_str.str(); |
| 3244 | return false; |
| 3245 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3246 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3247 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3248 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3249 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3250 | case DescriptorClass::GeneralBuffer: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3251 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3252 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(src_index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 3253 | if (!src_desc->updated) continue; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 3254 | auto buffer_state = static_cast<const BufferDescriptor *>(src_desc)->GetBufferState(); |
| 3255 | if (buffer_state) { |
| 3256 | if (!cvdescriptorset::ValidateBufferUsage(report_data, buffer_state, src_type, error_code, error_msg)) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3257 | std::stringstream error_str; |
| 3258 | error_str << "Attempted copy update to buffer descriptor failed due to: " << error_msg->c_str(); |
| 3259 | *error_msg = error_str.str(); |
| 3260 | return false; |
| 3261 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3262 | } |
Tobin Ehlis | cbcf234 | 2016-05-24 13:07:12 -0600 | [diff] [blame] | 3263 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3264 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3265 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3266 | case DescriptorClass::InlineUniform: |
| 3267 | case DescriptorClass::AccelerationStructure: |
Ricardo Garcia | 14f4f76 | 2021-04-13 11:36:12 +0200 | [diff] [blame] | 3268 | case DescriptorClass::Mutable: |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 3269 | break; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3270 | default: |
| 3271 | assert(0); // We've already verified update type so should never get here |
| 3272 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3273 | } |
| 3274 | // All checks passed so update contents are good |
| 3275 | return true; |
Chris Forbes | b4e0bdb | 2016-05-31 16:34:40 +1200 | [diff] [blame] | 3276 | } |
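// Illustrative sketch (not part of the validation layer): a copy update of the kind checked
// above, duplicating binding 0 of one set into binding 0 of another. Both sets are assumed to
// have compatible layouts and the source binding is assumed to have already been written.
static void ExampleCopyDescriptor(VkDevice device, VkDescriptorSet src_set, VkDescriptorSet dst_set) {
    VkCopyDescriptorSet copy = {};
    copy.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
    copy.srcSet = src_set;
    copy.srcBinding = 0;
    copy.srcArrayElement = 0;
    copy.dstSet = dst_set;
    copy.dstBinding = 0;
    copy.dstArrayElement = 0;
    copy.descriptorCount = 1;

    // No writes, one copy; VerifyCopyUpdateContents() examines the source descriptors' contents.
    vkUpdateDescriptorSets(device, 0, nullptr, 1, &copy);
}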
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 3277 | // Verify that the state at allocate time is correct, but don't actually allocate the sets yet |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 3278 | bool CoreChecks::ValidateAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info, |
Jeff Bolz | 46c0ea0 | 2019-10-09 13:06:29 -0500 | [diff] [blame] | 3279 | const cvdescriptorset::AllocateDescriptorSetsData *ds_data) const { |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 3280 | bool skip = false; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3281 | auto pool_state = Get<DESCRIPTOR_POOL_STATE>(p_alloc_info->descriptorPool); |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 3282 | |
| 3283 | for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) { |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 3284 | auto layout = Get<cvdescriptorset::DescriptorSetLayout>(p_alloc_info->pSetLayouts[i]); |
John Zulauf | 5562d06 | 2018-01-24 11:54:05 -0700 | [diff] [blame] | 3285 | if (layout) { // nullptr layout indicates no valid layout handle for this device, validated/logged in object_tracker |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 3286 | if (layout->IsPushDescriptor()) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3287 | skip |= LogError(p_alloc_info->pSetLayouts[i], "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308", |
| 3288 | "%s specified at pSetLayouts[%" PRIu32 |
| 3289 | "] in vkAllocateDescriptorSets() was created with invalid flag %s set.", |
| 3290 | report_data->FormatHandle(p_alloc_info->pSetLayouts[i]).c_str(), i, |
| 3291 | "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR"); |
John Zulauf | 5562d06 | 2018-01-24 11:54:05 -0700 | [diff] [blame] | 3292 | } |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 3293 | if (layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT && |
| 3294 | !(pool_state->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT)) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3295 | skip |= LogError( |
| 3296 | device, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-03044", |
| 3297 | "vkAllocateDescriptorSets(): Descriptor set layout create flags and pool create flags mismatch for index (%d)", |
| 3298 | i); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 3299 | } |
ziga-lunarg | 2ab9653 | 2021-07-19 11:06:41 +0200 | [diff] [blame] | 3300 | if (layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE && |
| 3301 | !(pool_state->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE)) { |
| 3302 | skip |= LogError(device, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-04610", |
| 3303 | "vkAllocateDescriptorSets(): pSetLayouts[%d].flags contain " |
| 3304 | "VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE bit, but the pool was not created " |
| 3305 | "with the VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE bit.", |
| 3306 | i); |
| 3307 | } |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 3308 | } |
| 3309 | } |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 3310 | if (!IsExtEnabled(device_extensions.vk_khr_maintenance1)) { |
Mike Schuchardt | 64b5bb7 | 2017-03-21 16:33:26 -0600 | [diff] [blame] | 3311 | // Track number of descriptorSets allowable in this pool |
| 3312 | if (pool_state->availableSets < p_alloc_info->descriptorSetCount) { |
Jeremy Gebben | 1fbebb8 | 2021-10-27 10:27:27 -0600 | [diff] [blame] | 3313 | skip |= LogError(pool_state->Handle(), "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3314 | "vkAllocateDescriptorSets(): Unable to allocate %u descriptorSets from %s" |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3315 | ". This pool only has %d descriptorSets remaining.", |
Jeremy Gebben | 1fbebb8 | 2021-10-27 10:27:27 -0600 | [diff] [blame] | 3316 | p_alloc_info->descriptorSetCount, report_data->FormatHandle(pool_state->Handle()).c_str(), |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3317 | pool_state->availableSets); |
Mike Schuchardt | 64b5bb7 | 2017-03-21 16:33:26 -0600 | [diff] [blame] | 3318 | } |
| 3319 | // Determine whether descriptor counts are satisfiable |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 3320 | for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) { |
Jeff Bolz | 46c0ea0 | 2019-10-09 13:06:29 -0500 | [diff] [blame] | 3321 | auto count_iter = pool_state->availableDescriptorTypeCount.find(it->first); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 3322 | uint32_t available_count = (count_iter != pool_state->availableDescriptorTypeCount.end()) ? count_iter->second : 0; |
Jeff Bolz | 46c0ea0 | 2019-10-09 13:06:29 -0500 | [diff] [blame] | 3323 | |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 3324 | if (ds_data->required_descriptors_by_type.at(it->first) > available_count) { |
Jeremy Gebben | 1fbebb8 | 2021-10-27 10:27:27 -0600 | [diff] [blame] | 3325 | skip |= LogError(pool_state->Handle(), "VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3326 | "vkAllocateDescriptorSets(): Unable to allocate %u descriptors of type %s from %s" |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3327 | ". This pool only has %d descriptors of this type remaining.", |
| 3328 | ds_data->required_descriptors_by_type.at(it->first), |
| 3329 | string_VkDescriptorType(VkDescriptorType(it->first)), |
Jeremy Gebben | 1fbebb8 | 2021-10-27 10:27:27 -0600 | [diff] [blame] | 3330 | report_data->FormatHandle(pool_state->Handle()).c_str(), available_count); |
Mike Schuchardt | 64b5bb7 | 2017-03-21 16:33:26 -0600 | [diff] [blame] | 3331 | } |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 3332 | } |
| 3333 | } |
Tobin Ehlis | 5d749ea | 2016-07-18 13:14:01 -0600 | [diff] [blame] | 3334 | |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 3335 | const auto *count_allocate_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(p_alloc_info->pNext); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 3336 | |
| 3337 | if (count_allocate_info) { |
| 3338 | if (count_allocate_info->descriptorSetCount != 0 && |
| 3339 | count_allocate_info->descriptorSetCount != p_alloc_info->descriptorSetCount) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3340 | skip |= LogError(device, "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfo-descriptorSetCount-03045", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3341 | "vkAllocateDescriptorSets(): VkDescriptorSetAllocateInfo::descriptorSetCount (%d) != " |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 3342 | "VkDescriptorSetVariableDescriptorCountAllocateInfo::descriptorSetCount (%d)", |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3343 | p_alloc_info->descriptorSetCount, count_allocate_info->descriptorSetCount); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 3344 | } |
| 3345 | if (count_allocate_info->descriptorSetCount == p_alloc_info->descriptorSetCount) { |
| 3346 | for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3347 | auto layout = Get<cvdescriptorset::DescriptorSetLayout>(p_alloc_info->pSetLayouts[i]); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 3348 | if (count_allocate_info->pDescriptorCounts[i] > layout->GetDescriptorCountFromBinding(layout->GetMaxBinding())) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3349 | skip |= LogError(device, "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfo-pSetLayouts-03046", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3350 | "vkAllocateDescriptorSets(): pDescriptorCounts[%d] = (%d), binding's descriptorCount = (%d)", |
| 3351 | i, count_allocate_info->pDescriptorCounts[i], |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3352 | layout->GetDescriptorCountFromBinding(layout->GetMaxBinding())); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 3353 | } |
| 3354 | } |
| 3355 | } |
| 3356 | } |
| 3357 | |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 3358 | return skip; |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 3359 | } |
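// Illustrative sketch (not part of the validation layer): an allocation that stays within the
// pool limits checked above — enough sets (maxSets) and enough descriptors of each type
// (pPoolSizes) for the requested layout. Counts are arbitrary example values and error handling
// is omitted; the layout is assumed to contain only uniform buffer bindings.
static VkDescriptorSet ExampleAllocateSet(VkDevice device, VkDescriptorSetLayout layout) {
    VkDescriptorPoolSize pool_size = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 4};
    VkDescriptorPoolCreateInfo pool_info = {};
    pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    pool_info.maxSets = 4;
    pool_info.poolSizeCount = 1;
    pool_info.pPoolSizes = &pool_size;

    VkDescriptorPool pool = VK_NULL_HANDLE;
    vkCreateDescriptorPool(device, &pool_info, nullptr, &pool);

    VkDescriptorSetAllocateInfo alloc_info = {};
    alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    alloc_info.descriptorPool = pool;
    alloc_info.descriptorSetCount = 1;
    alloc_info.pSetLayouts = &layout;

    VkDescriptorSet set = VK_NULL_HANDLE;
    vkAllocateDescriptorSets(device, &alloc_info, &set);
    return set;
}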
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 3360 | |
Jeff Bolz | dd4cfa1 | 2019-08-11 20:57:51 -0500 | [diff] [blame] | 3361 | const BindingReqMap &cvdescriptorset::PrefilterBindRequestMap::FilteredMap(const CMD_BUFFER_STATE &cb_state, |
| 3362 | const PIPELINE_STATE &pipeline) { |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 3363 | if (IsManyDescriptors()) { |
Karl Schultz | 7090a05 | 2020-11-10 08:54:21 -0700 | [diff] [blame] | 3364 | filtered_map_.reset(new BindingReqMap); |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 3365 | descriptor_set_.FilterBindingReqs(cb_state, pipeline, orig_map_, filtered_map_.get()); |
| 3366 | return *filtered_map_; |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 3367 | } |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 3368 | return orig_map_; |
Artem Kharytoniuk | 2456f99 | 2018-01-12 14:17:41 +0100 | [diff] [blame] | 3369 | } |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3370 | |
| 3371 | // Starting at the offset'th descriptor of the given binding, parse over update_count descriptor updates and verify
| 3372 | // that, for any binding boundaries that are crossed, the next binding(s) are all consistent.
| 3373 | // Consistency means that their type, stage flags, and whether or not they use immutable samplers all match.
| 3374 | // If so, return true. If not, fill in error_msg and return false.
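| | //
| | // Illustrative sketch (hypothetical handles and layout) of the kind of update this routine has to walk: with a layout
| | // whose bindings 0 and 1 each hold 4 VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE descriptors with identical stageFlags and no
| | // immutable samplers, a single write may legally spill from binding 0 into binding 1:
| | //
| | //     VkWriteDescriptorSet write = {};
| | //     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
| | //     write.dstSet = set;                 // hypothetical VkDescriptorSet
| | //     write.dstBinding = 0;
| | //     write.dstArrayElement = 2;          // start at element 2 of binding 0
| | //     write.descriptorCount = 6;          // elements 2..3 of binding 0, then elements 0..3 of binding 1
| | //     write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
| | //     write.pImageInfo = image_infos;     // hypothetical array of 6 VkDescriptorImageInfo
| | //     vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);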
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3375 | bool cvdescriptorset::VerifyUpdateConsistency(debug_report_data *report_data, |
| 3376 | DescriptorSetLayout::ConstBindingIterator current_binding, uint32_t offset, |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3377 | uint32_t update_count, const char *type, const VkDescriptorSet set, |
| 3378 | std::string *error_msg) { |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3379 | bool pass = true; |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3380 | // Verify consecutive bindings match (if needed) |
| 3381 | auto orig_binding = current_binding; |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3382 | |
| 3383 | while (pass && update_count) { |
| 3384 | // First, it's legal to offset beyond your own binding so handle that case |
| 3385 | if (offset > 0) { |
| 3386 | const auto &index_range = current_binding.GetGlobalIndexRange(); |
| 3387 | // index_range.start + offset is the descriptor that needs to be updated. If it is >= index_range.end, the
| 3388 | // descriptor isn't in this binding; it may be in the next binding.
| 3389 | if ((index_range.start + offset) >= index_range.end) { |
| 3390 | // Advance to next binding, decrement offset by binding size |
| 3391 | offset -= current_binding.GetDescriptorCount(); |
| 3392 | ++current_binding; |
| 3393 | // Verify next consecutive binding matches type, stage flags & immutable sampler use and if AtEnd |
| 3394 | if (!orig_binding.IsConsistent(current_binding)) { |
| 3395 | pass = false; |
| 3396 | } |
| 3397 | continue; |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3398 | } |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3399 | } |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3400 | |
| 3401 | update_count -= std::min(update_count, current_binding.GetDescriptorCount() - offset); |
| 3402 | if (update_count) { |
| 3403 | // The update spills past the end of the current binding. Reset the offset, advance to the next binding, and
| 3404 | // check consistency. All bindings touched by the update (even those skipped over by the offset) must be
| 3405 | // consistent with the original binding.
| 3406 | offset = 0; |
| 3407 | ++current_binding; |
| 3408 | // Verify next consecutive binding matches type, stage flags & immutable sampler use and if AtEnd |
| 3409 | if (!orig_binding.IsConsistent(current_binding)) { |
| 3410 | pass = false; |
| 3411 | } |
| 3412 | } |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3413 | } |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3414 | |
| 3415 | if (!pass) { |
| 3416 | std::stringstream error_str; |
| 3417 | error_str << "Attempting " << type; |
| 3418 | if (current_binding.Layout()->IsPushDescriptor()) { |
| 3419 | error_str << " push descriptors"; |
| 3420 | } else { |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3421 | error_str << " descriptor set " << report_data->FormatHandle(set); |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3422 | } |
| 3423 | error_str << " binding #" << orig_binding.Binding() << " with #" << update_count |
| 3424 | << " descriptors being updated but this update oversteps the bounds of this binding and the next binding is " |
sfricke-samsung | 5de3488 | 2021-04-15 22:33:23 -0700 | [diff] [blame] | 3425 | "not consistent with current binding"; |
| 3426 | |
| 3427 | // Get what was not consistent in IsConsistent() as a more detailed error message |
| 3428 | const auto *binding_ci = orig_binding.GetDescriptorSetLayoutBindingPtr(); |
| 3429 | const auto *other_binding_ci = current_binding.GetDescriptorSetLayoutBindingPtr(); |
| 3430 | if (binding_ci == nullptr || other_binding_ci == nullptr) { |
| 3431 | error_str << " (No two valid DescriptorSetLayoutBinding to compare)"; |
| 3432 | } else if (binding_ci->descriptorType != other_binding_ci->descriptorType) { |
| 3433 | error_str << " (" << string_VkDescriptorType(binding_ci->descriptorType) |
| 3434 | << " != " << string_VkDescriptorType(other_binding_ci->descriptorType) << ")"; |
| 3435 | } else if (binding_ci->stageFlags != other_binding_ci->stageFlags) { |
| 3436 | error_str << " (" << string_VkShaderStageFlags(binding_ci->stageFlags) |
| 3437 | << " != " << string_VkShaderStageFlags(other_binding_ci->stageFlags) << ")"; |
| 3438 | } else if (!hash_util::similar_for_nullity(binding_ci->pImmutableSamplers, other_binding_ci->pImmutableSamplers)) { |
| 3439 | error_str << " (pImmutableSamplers don't match)"; |
| 3440 | } else if (orig_binding.GetDescriptorBindingFlags() != current_binding.GetDescriptorBindingFlags()) { |
| 3441 | error_str << " (" << string_VkDescriptorBindingFlags(orig_binding.GetDescriptorBindingFlags()) |
| 3442 | << " != " << string_VkDescriptorBindingFlags(current_binding.GetDescriptorBindingFlags()) << ")"; |
| 3443 | } |
| 3444 | |
| 3445 | error_str << " so this update is invalid"; |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3446 | *error_msg = error_str.str(); |
| 3447 | } |
| 3448 | return pass; |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3449 | } |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3450 | |
| 3451 | // Validate the state for a given write update but don't actually perform the update |
| 3452 | // If an error would occur for this update, return false and fill in details in error_msg string |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3453 | bool CoreChecks::ValidateWriteUpdate(const DescriptorSet *dest_set, const VkWriteDescriptorSet *update, const char *func_name, |
ziga-lunarg | 6c46b24 | 2021-09-13 18:33:37 +0200 | [diff] [blame] | 3454 | std::string *error_code, std::string *error_msg, bool push) const { |
Jeff Bolz | 6aad174 | 2019-10-16 11:10:09 -0500 | [diff] [blame] | 3455 | const auto dest_layout = dest_set->GetLayout().get(); |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3456 | |
| 3457 | // Verify dst layout still valid |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 3458 | if (dest_layout->Destroyed()) { |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3459 | *error_code = "VUID-VkWriteDescriptorSet-dstSet-00320"; |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 3460 | std::ostringstream str; |
| 3461 | str << "Cannot call " << func_name << " to perform write update on " << dest_set->StringifySetAndLayout() |
| 3462 | << " which has been destroyed"; |
| 3463 | *error_msg = str.str(); |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3464 | return false; |
| 3465 | } |
| 3466 | // Verify dst binding exists |
| 3467 | if (!dest_layout->HasBinding(update->dstBinding)) { |
| 3468 | *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00315"; |
| 3469 | std::stringstream error_str; |
| 3470 | error_str << dest_set->StringifySetAndLayout() << " does not have binding " << update->dstBinding; |
| 3471 | *error_msg = error_str.str(); |
| 3472 | return false; |
| 3473 | } |
| 3474 | |
Jeff Bolz | 6aad174 | 2019-10-16 11:10:09 -0500 | [diff] [blame] | 3475 | DescriptorSetLayout::ConstBindingIterator dest(dest_layout, update->dstBinding); |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3476 | // Make sure binding isn't empty |
| 3477 | if (0 == dest.GetDescriptorCount()) { |
| 3478 | *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00316"; |
| 3479 | std::stringstream error_str; |
| 3480 | error_str << dest_set->StringifySetAndLayout() << " cannot updated binding " << update->dstBinding |
| 3481 | << " that has 0 descriptors"; |
| 3482 | *error_msg = error_str.str(); |
| 3483 | return false; |
| 3484 | } |
| 3485 | |
| 3486 | // Verify idle ds |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 3487 | if (dest_set->InUse() && !(dest.GetDescriptorBindingFlags() & (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 3488 | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) { |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3489 | // TODO : Re-using Free Idle error code, need write update idle error code |
| 3490 | *error_code = "VUID-vkFreeDescriptorSets-pDescriptorSets-00309"; |
| 3491 | std::stringstream error_str; |
| 3492 | error_str << "Cannot call " << func_name << " to perform write update on " << dest_set->StringifySetAndLayout() |
| 3493 | << " that is in use by a command buffer"; |
| 3494 | *error_msg = error_str.str(); |
| 3495 | return false; |
| 3496 | } |
| 3497 | // We know that binding is valid, verify update and do update on each descriptor |
| 3498 | auto start_idx = dest.GetGlobalIndexRange().start + update->dstArrayElement; |
| 3499 | auto type = dest.GetType(); |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 3500 | if ((type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) && (type != update->descriptorType)) { |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3501 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00319"; |
| 3502 | std::stringstream error_str; |
| 3503 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 3504 | << " with type " << string_VkDescriptorType(type) << " but update type is " |
| 3505 | << string_VkDescriptorType(update->descriptorType); |
| 3506 | *error_msg = error_str.str(); |
| 3507 | return false; |
| 3508 | } |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3509 | if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) { |
| 3510 | if ((update->dstArrayElement % 4) != 0) { |
| 3511 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02219"; |
| 3512 | std::stringstream error_str; |
| 3513 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 3514 | << " with " |
| 3515 | << "dstArrayElement " << update->dstArrayElement << " not a multiple of 4"; |
| 3516 | *error_msg = error_str.str(); |
| 3517 | return false; |
| 3518 | } |
| 3519 | if ((update->descriptorCount % 4) != 0) { |
| 3520 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02220"; |
| 3521 | std::stringstream error_str; |
| 3522 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 3523 | << " with " |
| 3524 | << "descriptorCount " << update->descriptorCount << " not a multiple of 4"; |
| 3525 | *error_msg = error_str.str(); |
| 3526 | return false; |
| 3527 | } |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 3528 | const auto *write_inline_info = LvlFindInChain<VkWriteDescriptorSetInlineUniformBlockEXT>(update->pNext); |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3529 | if (!write_inline_info || write_inline_info->dataSize != update->descriptorCount) { |
| 3530 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02221"; |
| 3531 | std::stringstream error_str; |
| 3532 | if (!write_inline_info) { |
| 3533 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" |
| 3534 | << update->dstBinding << " with " |
| 3535 | << "VkWriteDescriptorSetInlineUniformBlockEXT missing"; |
| 3536 | } else { |
| 3537 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" |
| 3538 | << update->dstBinding << " with " |
| 3539 | << "VkWriteDescriptorSetInlineUniformBlockEXT dataSize " << write_inline_info->dataSize |
| 3540 | << " not equal to " |
| 3541 | << "VkWriteDescriptorSet descriptorCount " << update->descriptorCount; |
| 3542 | } |
| 3543 | *error_msg = error_str.str(); |
| 3544 | return false; |
| 3545 | } |
| 3546 | // This error is probably unreachable due to the previous two errors |
| 3547 | if (write_inline_info && (write_inline_info->dataSize % 4) != 0) { |
| 3548 | *error_code = "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-02222"; |
| 3549 | std::stringstream error_str; |
| 3550 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 3551 | << " with " |
| 3552 | << "VkWriteDescriptorSetInlineUniformBlockEXT dataSize " << write_inline_info->dataSize |
| 3553 | << " not a multiple of 4"; |
| 3554 | *error_msg = error_str.str(); |
| 3555 | return false; |
| 3556 | } |
| 3557 | } |
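| | // Illustrative sketch (hypothetical handles and data) of a write that satisfies the inline uniform block checks above:
| | // dstArrayElement and descriptorCount are byte values and must be multiples of 4, and dataSize must equal descriptorCount:
| | //
| | //     const uint32_t data[4] = {0, 1, 2, 3};
| | //     VkWriteDescriptorSetInlineUniformBlockEXT inline_write = {};
| | //     inline_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
| | //     inline_write.dataSize = sizeof(data);     // 16 bytes, a multiple of 4
| | //     inline_write.pData = data;
| | //     VkWriteDescriptorSet write = {};
| | //     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
| | //     write.pNext = &inline_write;
| | //     write.dstSet = set;                       // hypothetical set with an inline uniform block binding
| | //     write.dstBinding = 0;
| | //     write.dstArrayElement = 0;                // byte offset into the block, multiple of 4
| | //     write.descriptorCount = sizeof(data);     // byte count, must equal dataSize
| | //     write.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
| | //     vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);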
sfricke-samsung | 941d48b | 2020-02-10 00:20:01 -0800 | [diff] [blame] | 3558 | // Verify that all bindings being updated share identical properties across all items
| 3559 | if (update->descriptorCount > 0) { |
| 3560 | // Save first binding information and error if something different is found |
| 3561 | DescriptorSetLayout::ConstBindingIterator current_binding(dest_layout, update->dstBinding); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 3562 | VkShaderStageFlags stage_flags = current_binding.GetStageFlags(); |
| 3563 | VkDescriptorType descriptor_type = current_binding.GetType(); |
| 3564 | bool immutable_samplers = (current_binding.GetImmutableSamplerPtr() == nullptr);  // true when the first binding has no immutable samplers
| 3565 | uint32_t dst_array_element = update->dstArrayElement; |
sfricke-samsung | 941d48b | 2020-02-10 00:20:01 -0800 | [diff] [blame] | 3566 | |
Jeff Bolz | 9198e88 | 2020-03-18 13:03:30 -0500 | [diff] [blame] | 3567 | for (uint32_t i = 0; i < update->descriptorCount;) { |
sfricke-samsung | 941d48b | 2020-02-10 00:20:01 -0800 | [diff] [blame] | 3568 | if (current_binding.AtEnd()) {
| 3569 | break; // prevents setting error here if bindings don't exist |
| 3570 | } |
| 3571 | |
Quentin Huot-Marchand | 98d84dd | 2021-06-24 09:54:58 +0200 | [diff] [blame] | 3572 | // All consecutive bindings updated, except those with a descriptorCount of zero, must have identical descriptorType and stageFlags
| 3573 | if (current_binding.GetDescriptorCount() > 0) {
| 3574 | // Check for consistent stageFlags and descriptorType |
| 3575 | if ((current_binding.GetStageFlags() != stage_flags) || (current_binding.GetType() != descriptor_type)) { |
| 3576 | *error_code = "VUID-VkWriteDescriptorSet-descriptorCount-00317"; |
| 3577 | std::stringstream error_str; |
| 3578 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding index #" |
| 3579 | << current_binding.GetIndex() << " (" << i << " from dstBinding offset)" |
| 3580 | << " with a different stageFlag and/or descriptorType from previous bindings." |
| 3581 | << " All bindings must have consecutive stageFlag and/or descriptorType across a VkWriteDescriptorSet"; |
| 3582 | *error_msg = error_str.str(); |
| 3583 | return false; |
| 3584 | } |
| 3585 | // Check if all immutableSamplers or not |
| 3586 | if ((current_binding.GetImmutableSamplerPtr() == nullptr) != immutable_samplers) { |
| 3587 | *error_code = "VUID-VkWriteDescriptorSet-descriptorCount-00318"; |
| 3588 | std::stringstream error_str; |
| 3589 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding index #" |
| 3590 | << current_binding.GetIndex() << " (" << i << " from dstBinding offset)" |
| 3591 | << " with a different usage of immutable samplers from previous bindings." |
| 3592 | << " All bindings must have all or none usage of immutable samplers across a VkWriteDescriptorSet"; |
| 3593 | *error_msg = error_str.str(); |
| 3594 | return false; |
| 3595 | } |
sfricke-samsung | 941d48b | 2020-02-10 00:20:01 -0800 | [diff] [blame] | 3596 | } |
Jeff Bolz | 9198e88 | 2020-03-18 13:03:30 -0500 | [diff] [blame] | 3597 | |
| 3598 | // Skip the remaining descriptors for this binding, and move to the next binding |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 3599 | i += (current_binding.GetDescriptorCount() - dst_array_element); |
| 3600 | dst_array_element = 0; |
sfricke-samsung | 941d48b | 2020-02-10 00:20:01 -0800 | [diff] [blame] | 3601 | ++current_binding; |
| 3602 | } |
| 3603 | } |
| 3604 | |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3605 | // Verify consecutive bindings match (if needed) |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3606 | if (!VerifyUpdateConsistency(report_data, DescriptorSetLayout::ConstBindingIterator(dest_layout, update->dstBinding), |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3607 | update->dstArrayElement, update->descriptorCount, "write update to", dest_set->GetSet(), |
| 3608 | error_msg)) { |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3609 | *error_code = "VUID-VkWriteDescriptorSet-dstArrayElement-00321"; |
| 3610 | return false; |
| 3611 | } |
Tony-LunarG | 1f79c95 | 2020-10-27 15:55:51 -0600 | [diff] [blame] | 3612 | // Verify write to variable descriptor |
| 3613 | if (dest_set->IsVariableDescriptorCount(update->dstBinding)) { |
| 3614 | if ((update->dstArrayElement + update->descriptorCount) > dest_set->GetVariableDescriptorCount()) { |
| 3615 | std::stringstream error_str; |
| 3616 | *error_code = "VUID-VkWriteDescriptorSet-dstArrayElement-00321"; |
| 3617 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding index #" |
| 3618 | << update->dstBinding << " array element " << update->dstArrayElement << " with " << update->descriptorCount |
| 3619 | << " writes but variable descriptor size is " << dest_set->GetVariableDescriptorCount(); |
| 3620 | *error_msg = error_str.str(); |
| 3621 | return false; |
| 3622 | } |
| 3623 | } |
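| | // For example (hedged illustration): if the set was allocated with a variable descriptor count of 32, a write with
| | // dstArrayElement = 30 and descriptorCount = 4 would end at element 34 and fail the check above.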
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3624 | // Update is within bounds and consistent so last step is to validate update contents |
ziga-lunarg | 6c46b24 | 2021-09-13 18:33:37 +0200 | [diff] [blame] | 3625 | if (!VerifyWriteUpdateContents(dest_set, update, start_idx, func_name, error_code, error_msg, push)) { |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3626 | std::stringstream error_str; |
| 3627 | error_str << "Write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 3628 | << " failed with error message: " << error_msg->c_str(); |
| 3629 | *error_msg = error_str.str(); |
| 3630 | return false; |
| 3631 | } |
ziga-lunarg | d67b5f5 | 2021-10-16 23:52:59 +0200 | [diff] [blame] | 3632 | const auto orig_binding = DescriptorSetLayout::ConstBindingIterator(dest_set->GetLayout().get(), update->dstBinding); |
| 3633 | if (!orig_binding.AtEnd() && orig_binding.GetType() == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 3634 | // Check if the new descriptor type is in the list of allowed mutable types for this binding
| 3635 | if (!orig_binding.Layout()->IsTypeMutable(update->descriptorType, update->dstBinding)) { |
| 3636 | *error_code = "VUID-VkWriteDescriptorSet-dstSet-04611"; |
| 3637 | std::stringstream error_str; |
| 3638 | error_str << "Write update type is " << string_VkDescriptorType(update->descriptorType) |
| 3639 | << ", but descriptor set layout binding was created with type VK_DESCRIPTOR_TYPE_MUTABLE_VALVE and used type " |
| 3640 | "is not in VkMutableDescriptorTypeListVALVE::pDescriptorTypes for this binding."; |
| 3641 | *error_msg = error_str.str(); |
| 3642 | return false; |
| 3643 | } |
| 3644 | } |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3645 | // All checks passed, update is clean |
| 3646 | return true; |
| 3647 | } |
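| | // Illustrative sketch (hypothetical handles; assumes VK_VALVE_mutable_descriptor_type is enabled) of the kind of layout
| | // binding the VK_DESCRIPTOR_TYPE_MUTABLE_VALVE check above is concerned with: a write to such a binding must use one of
| | // the types listed in its VkMutableDescriptorTypeListVALVE:
| | //
| | //     const VkDescriptorType allowed[] = {VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE};
| | //     VkMutableDescriptorTypeListVALVE type_list = {2, allowed};
| | //     VkMutableDescriptorTypeCreateInfoVALVE mutable_info = {};
| | //     mutable_info.sType = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE;
| | //     mutable_info.mutableDescriptorTypeListCount = 1;
| | //     mutable_info.pMutableDescriptorTypeLists = &type_list;
| | //     // Chain mutable_info into VkDescriptorSetLayoutCreateInfo::pNext for a binding of type
| | //     // VK_DESCRIPTOR_TYPE_MUTABLE_VALVE; a later write with VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER would then
| | //     // trigger VUID-VkWriteDescriptorSet-dstSet-04611.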
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3648 | |
| 3649 | // Verify that the contents of the update are ok, but don't perform actual update |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3650 | bool CoreChecks::VerifyWriteUpdateContents(const DescriptorSet *dest_set, const VkWriteDescriptorSet *update, const uint32_t index, |
ziga-lunarg | 6c46b24 | 2021-09-13 18:33:37 +0200 | [diff] [blame] | 3651 | const char *func_name, std::string *error_code, std::string *error_msg, |
| 3652 | bool push) const { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3653 | using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor; |
Nathaniel Cesario | ff52151 | 2020-12-11 16:00:26 -0700 | [diff] [blame] | 3654 | using Descriptor = cvdescriptorset::Descriptor; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3655 | |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3656 | switch (update->descriptorType) { |
| 3657 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: { |
| 3658 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 3659 | // Validate image |
| 3660 | auto image_view = update->pImageInfo[di].imageView; |
| 3661 | auto image_layout = update->pImageInfo[di].imageLayout; |
Mark Lobodzinski | 3ca937b | 2020-02-14 14:56:06 -0700 | [diff] [blame] | 3662 | auto sampler = update->pImageInfo[di].sampler; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3663 | auto iv_state = Get<IMAGE_VIEW_STATE>(image_view); |
Nathaniel Cesario | ff52151 | 2020-12-11 16:00:26 -0700 | [diff] [blame] | 3664 | const ImageSamplerDescriptor *desc = |
| 3665 | (const ImageSamplerDescriptor *)dest_set->GetDescriptorFromGlobalIndex(index + di); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3666 | if (image_view) { |
| 3667 | auto image_state = iv_state->image_state.get(); |
| 3668 | if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, func_name, error_code, error_msg)) { |
| 3669 | std::stringstream error_str; |
| 3670 | error_str << "Attempted write update to combined image sampler descriptor failed due to: " |
| 3671 | << error_msg->c_str(); |
| 3672 | *error_msg = error_str.str(); |
| 3673 | return false; |
| 3674 | } |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 3675 | if (IsExtEnabled(device_extensions.vk_khr_sampler_ycbcr_conversion)) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3676 | if (desc->IsImmutableSampler()) { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3677 | auto sampler_state = Get<SAMPLER_STATE>(desc->GetSampler()); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3678 | if (iv_state && sampler_state) { |
| 3679 | if (iv_state->samplerConversion != sampler_state->samplerConversion) { |
| 3680 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-01948"; |
| 3681 | std::stringstream error_str; |
| 3682 | error_str |
| 3683 | << "Attempted write update to combined image sampler and image view and sampler ycbcr " |
| 3684 | "conversions are not identical, sampler: " |
| 3685 | << report_data->FormatHandle(desc->GetSampler()) |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3686 | << " image view: " << report_data->FormatHandle(iv_state->image_view()) << "."; |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3687 | *error_msg = error_str.str(); |
| 3688 | return false; |
| 3689 | } |
| 3690 | } |
| 3691 | } else { |
| 3692 | if (iv_state && (iv_state->samplerConversion != VK_NULL_HANDLE)) { |
| 3693 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02738"; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3694 | std::stringstream error_str; |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3695 | error_str << "Because dstSet (" << report_data->FormatHandle(update->dstSet) |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3696 | << ") is bound to image view (" << report_data->FormatHandle(iv_state->image_view()) |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3697 | << ") that includes a YCBCR conversion, it must have been allocated with a layout that " |
| 3698 | "includes an immutable sampler."; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3699 | *error_msg = error_str.str(); |
| 3700 | return false; |
| 3701 | } |
| 3702 | } |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3703 | } |
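| | // Illustrative note (hedged): an image view created over a VkSamplerYcbcrConversion can only be written to a
| | // combined image sampler binding that was created with a matching immutable sampler, e.g. a pImmutableSamplers
| | // entry whose VkSamplerCreateInfo chained the same VkSamplerYcbcrConversionInfo that the VkImageViewCreateInfo used.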
John Bauman | da8abff | 2020-10-19 21:25:21 +0000 | [diff] [blame] | 3704 | // If there is an immutable sampler then |sampler| isn't used, so the following VU does not apply. |
| 3705 | if (sampler && !desc->IsImmutableSampler() && FormatIsMultiplane(image_state->createInfo.format)) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3706 | // multiplane formats must be created with mutable format bit |
| 3707 | if (0 == (image_state->createInfo.flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT)) { |
| 3708 | *error_code = "VUID-VkDescriptorImageInfo-sampler-01564"; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3709 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3710 | error_str << "image " << report_data->FormatHandle(image_state->image()) |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3711 | << " combined image sampler is a multi-planar " |
| 3712 | << "format and was not was not created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT"; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3713 | *error_msg = error_str.str(); |
| 3714 | return false; |
| 3715 | } |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3716 | // image view need aspect mask for only the planes supported of format |
| 3717 | VkImageAspectFlags legal_aspect_flags = (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT); |
| 3718 | legal_aspect_flags |= |
| 3719 | (FormatPlaneCount(image_state->createInfo.format) == 3) ? VK_IMAGE_ASPECT_PLANE_2_BIT : 0; |
| 3720 | if (0 != (iv_state->create_info.subresourceRange.aspectMask & (~legal_aspect_flags))) { |
| 3721 | *error_code = "VUID-VkDescriptorImageInfo-sampler-01564"; |
| 3722 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3723 | error_str << "image " << report_data->FormatHandle(image_state->image()) |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3724 | << " combined image sampler is a multi-planar " |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3725 | << "format and " << report_data->FormatHandle(iv_state->image_view()) |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3726 | << " aspectMask must only include " << string_VkImageAspectFlags(legal_aspect_flags); |
| 3727 | *error_msg = error_str.str(); |
| 3728 | return false; |
| 3729 | } |
sfricke-samsung | 27e5d5a | 2020-01-07 21:07:08 -0800 | [diff] [blame] | 3730 | } |
Nathaniel Cesario | 23afadd | 2020-11-17 12:51:45 -0700 | [diff] [blame] | 3731 | |
| 3732 | // Verify portability |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3733 | auto sampler_state = Get<SAMPLER_STATE>(sampler); |
Nathaniel Cesario | 23afadd | 2020-11-17 12:51:45 -0700 | [diff] [blame] | 3734 | if (sampler_state) { |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 3735 | if (IsExtEnabled(device_extensions.vk_khr_portability_subset)) { |
Nathaniel Cesario | 23afadd | 2020-11-17 12:51:45 -0700 | [diff] [blame] | 3736 | if ((VK_FALSE == enabled_features.portability_subset_features.mutableComparisonSamplers) && |
| 3737 | (VK_FALSE != sampler_state->createInfo.compareEnable)) { |
| 3738 | LogError(device, "VUID-VkDescriptorImageInfo-mutableComparisonSamplers-04450", |
| 3739 | "%s (portability error): sampler comparison not available.", func_name); |
| 3740 | } |
| 3741 | } |
| 3742 | } |
sfricke-samsung | 27e5d5a | 2020-01-07 21:07:08 -0800 | [diff] [blame] | 3743 | } |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3744 | } |
| 3745 | } |
Mark Lobodzinski | ac72777 | 2020-01-08 10:47:30 -0700 | [diff] [blame] | 3746 | // Fall through |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3747 | case VK_DESCRIPTOR_TYPE_SAMPLER: { |
| 3748 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 3749 | const auto *desc = static_cast<const Descriptor *>(dest_set->GetDescriptorFromGlobalIndex(index + di)); |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3750 | if (!desc->IsImmutableSampler()) { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3751 | if (!ValidateSampler(update->pImageInfo[di].sampler)) { |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3752 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325"; |
| 3753 | std::stringstream error_str; |
| 3754 | error_str << "Attempted write update to sampler descriptor with invalid sampler: " |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3755 | << report_data->FormatHandle(update->pImageInfo[di].sampler) << "."; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3756 | *error_msg = error_str.str(); |
| 3757 | return false; |
| 3758 | } |
ziga-lunarg | 6c46b24 | 2021-09-13 18:33:37 +0200 | [diff] [blame] | 3759 | } else if (update->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER && !push) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3760 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02752"; |
| 3761 | std::stringstream error_str; |
| 3762 | error_str << "Attempted write update to an immutable sampler descriptor."; |
| 3763 | *error_msg = error_str.str(); |
| 3764 | return false; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3765 | } |
| 3766 | } |
| 3767 | break; |
| 3768 | } |
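| | // Illustrative sketch (hypothetical handles) of a plain sampler write; note that, per the check above, a binding
| | // created with pImmutableSamplers cannot be written with vkUpdateDescriptorSets (only push descriptor updates):
| | //
| | //     VkDescriptorImageInfo info = {};
| | //     info.sampler = sampler;   // hypothetical VkSampler; must be a valid handle (VUID 00325)
| | //     VkWriteDescriptorSet write = {};
| | //     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
| | //     write.dstSet = set;
| | //     write.dstBinding = 1;
| | //     write.descriptorCount = 1;
| | //     write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
| | //     write.pImageInfo = &info;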
| 3769 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
| 3770 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: |
| 3771 | case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: { |
| 3772 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 3773 | auto image_view = update->pImageInfo[di].imageView; |
| 3774 | auto image_layout = update->pImageInfo[di].imageLayout; |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3775 | if (image_view) { |
| 3776 | if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, func_name, error_code, error_msg)) { |
| 3777 | std::stringstream error_str; |
| 3778 | error_str << "Attempted write update to image descriptor failed due to: " << error_msg->c_str(); |
| 3779 | *error_msg = error_str.str(); |
| 3780 | return false; |
| 3781 | } |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3782 | } |
| 3783 | } |
| 3784 | break; |
| 3785 | } |
| 3786 | case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: |
| 3787 | case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: { |
| 3788 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 3789 | auto buffer_view = update->pTexelBufferView[di]; |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3790 | if (buffer_view) { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3791 | auto bv_state = Get<BUFFER_VIEW_STATE>(buffer_view); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3792 | if (!bv_state) { |
| 3793 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02994"; |
| 3794 | std::stringstream error_str; |
| 3795 | error_str << "Attempted write update to texel buffer descriptor with invalid buffer view: " |
| 3796 | << report_data->FormatHandle(buffer_view); |
| 3797 | *error_msg = error_str.str(); |
| 3798 | return false; |
| 3799 | } |
| 3800 | auto buffer = bv_state->create_info.buffer; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3801 | auto buffer_state = Get<BUFFER_STATE>(buffer); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3802 | // Verify that buffer underlying the view hasn't been destroyed prematurely |
| 3803 | if (!buffer_state) { |
| 3804 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02994"; |
| 3805 | std::stringstream error_str; |
| 3806 | error_str << "Attempted write update to texel buffer descriptor failed because underlying buffer (" |
| 3807 | << report_data->FormatHandle(buffer) << ") has been destroyed: " << error_msg->c_str(); |
| 3808 | *error_msg = error_str.str(); |
| 3809 | return false; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 3810 | } else if (!cvdescriptorset::ValidateBufferUsage(report_data, buffer_state.get(), update->descriptorType, |
| 3811 | error_code, error_msg)) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3812 | std::stringstream error_str; |
| 3813 | error_str << "Attempted write update to texel buffer descriptor failed due to: " << error_msg->c_str(); |
| 3814 | *error_msg = error_str.str(); |
| 3815 | return false; |
| 3816 | } |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3817 | } |
| 3818 | } |
| 3819 | break; |
| 3820 | } |
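| | // Illustrative sketch (hypothetical handles): texel buffer descriptors are written through a VkBufferView whose
| | // underlying buffer must still be alive and must have been created with VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT
| | // (or VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) to match the descriptor type:
| | //
| | //     VkWriteDescriptorSet write = {};
| | //     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
| | //     write.dstSet = set;
| | //     write.dstBinding = 2;
| | //     write.descriptorCount = 1;
| | //     write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
| | //     write.pTexelBufferView = &buffer_view;   // hypothetical VkBufferView
| | //     vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);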
| 3821 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: |
| 3822 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: |
| 3823 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: |
| 3824 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: { |
| 3825 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3826 | if (update->pBufferInfo[di].buffer) { |
| 3827 | if (!ValidateBufferUpdate(update->pBufferInfo + di, update->descriptorType, func_name, error_code, error_msg)) { |
| 3828 | std::stringstream error_str; |
| 3829 | error_str << "Attempted write update to buffer descriptor failed due to: " << error_msg->c_str(); |
| 3830 | *error_msg = error_str.str(); |
| 3831 | return false; |
| 3832 | } |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3833 | } |
| 3834 | } |
| 3835 | break; |
| 3836 | } |
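| | // Illustrative sketch (hypothetical handles): buffer descriptors are described by VkDescriptorBufferInfo; the
| | // buffer, offset and range are checked in ValidateBufferUpdate:
| | //
| | //     VkDescriptorBufferInfo info = {};
| | //     info.buffer = buffer;          // hypothetical VkBuffer created with VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
| | //     info.offset = 0;
| | //     info.range = VK_WHOLE_SIZE;
| | //     VkWriteDescriptorSet write = {};
| | //     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
| | //     write.dstSet = set;
| | //     write.dstBinding = 0;
| | //     write.descriptorCount = 1;
| | //     write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
| | //     write.pBufferInfo = &info;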
| 3837 | case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: |
| 3838 | break; |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 3839 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV: { |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 3840 | const auto *acc_info = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureNV>(update->pNext); |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 3841 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame^] | 3842 | auto as_state = Get<ACCELERATION_STRUCTURE_STATE>(acc_info->pAccelerationStructures[di]); |
| 3843 | if (!ValidateAccelerationStructureUpdate(as_state.get(), func_name, error_code, error_msg)) { |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 3844 | std::stringstream error_str; |
| 3845 | error_str << "Attempted write update to acceleration structure descriptor failed due to: " |
| 3846 | << error_msg->c_str(); |
| 3847 | *error_msg = error_str.str(); |
| 3848 | return false; |
| 3849 | } |
| 3850 | } |
| 3851 | |
| 3852 | } break; |
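| | // Illustrative sketch (hypothetical handles): NV ray tracing acceleration structures are passed through a
| | // pNext-chained VkWriteDescriptorSetAccelerationStructureNV rather than pImageInfo/pBufferInfo/pTexelBufferView:
| | //
| | //     VkWriteDescriptorSetAccelerationStructureNV as_write = {};
| | //     as_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV;
| | //     as_write.accelerationStructureCount = 1;
| | //     as_write.pAccelerationStructures = &tlas;   // hypothetical VkAccelerationStructureNV
| | //     VkWriteDescriptorSet write = {};
| | //     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
| | //     write.pNext = &as_write;
| | //     write.dstSet = set;
| | //     write.dstBinding = 3;
| | //     write.descriptorCount = 1;
| | //     write.descriptorType = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV;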
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 3853 | // KHR acceleration structures don't require memory to be bound manually to them. |
| 3854 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: |
| 3855 | break; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3856 | default: |
| 3857 | assert(0); // We've already verified update type so should never get here |
| 3858 | break; |
| 3859 | } |
| 3860 | // All checks passed so update contents are good |
| 3861 | return true; |
| 3862 | } |