/* Copyright (c) 2015-2022 The Khronos Group Inc.
 * Copyright (c) 2015-2022 Valve Corporation
 * Copyright (c) 2015-2022 LunarG, Inc.
 * Copyright (C) 2015-2022 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 *         John Zulauf <jzulauf@lunarg.com>
 *         Jeremy Kniager <jeremyk@lunarg.com>
 */

#include "chassis.h"
#include "core_validation_error_enums.h"
#include "core_validation.h"
#include "descriptor_sets.h"
#include "hash_vk_types.h"
#include "vk_enum_string_helper.h"
#include "vk_safe_struct.h"
#include "vk_typemap_helper.h"
#include "buffer_validation.h"
#include <sstream>
#include <algorithm>
#include <array>
#include <memory>

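// Compute the per-type descriptor capacity of a pool from its create info. The result seeds both
// maxDescriptorTypeCount and the running available_counts_ bookkeeping in DESCRIPTOR_POOL_STATE.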
static DESCRIPTOR_POOL_STATE::TypeCountMap GetMaxTypeCounts(const VkDescriptorPoolCreateInfo *create_info) {
    DESCRIPTOR_POOL_STATE::TypeCountMap counts;
    // Collect maximums per descriptor type.
    for (uint32_t i = 0; i < create_info->poolSizeCount; ++i) {
        const auto &pool_size = create_info->pPoolSizes[i];
        uint32_t type = static_cast<uint32_t>(pool_size.type);
        // Same descriptor types can appear several times
        counts[type] += pool_size.descriptorCount;
    }
    return counts;
}

DESCRIPTOR_POOL_STATE::DESCRIPTOR_POOL_STATE(ValidationStateTracker *dev, const VkDescriptorPool pool,
                                             const VkDescriptorPoolCreateInfo *pCreateInfo)
    : BASE_NODE(pool, kVulkanObjectTypeDescriptorPool),
      maxSets(pCreateInfo->maxSets),
      createInfo(pCreateInfo),
      maxDescriptorTypeCount(GetMaxTypeCounts(pCreateInfo)),
      available_sets_(pCreateInfo->maxSets),
      available_counts_(maxDescriptorTypeCount),
      dev_data_(dev) {}

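// Record the result of a successful descriptor set allocation from this pool: decrement the
// available set and per-type descriptor counts, then create and register tracking state for each
// new VkDescriptorSet. Expected to run on the post-call record side of vkAllocateDescriptorSets,
// after the driver has reported success.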
void DESCRIPTOR_POOL_STATE::Allocate(const VkDescriptorSetAllocateInfo *alloc_info, const VkDescriptorSet *descriptor_sets,
                                     const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto guard = WriteLock();
    // Account for sets and individual descriptors allocated from pool
    available_sets_ -= alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        available_counts_[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], this, ds_data->layout_nodes[i],
                                                                       variable_count, dev_data_);
        sets_.emplace(descriptor_sets[i], new_ds.get());
        dev_data_->Add(std::move(new_ds));
    }
}

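// Undo the bookkeeping for freed descriptor sets: return the set slot and each binding's
// descriptor counts to the pool, then destroy the per-set tracking state. VK_NULL_HANDLE entries
// are skipped, matching what vkFreeDescriptorSets allows.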
void DESCRIPTOR_POOL_STATE::Free(uint32_t count, const VkDescriptorSet *descriptor_sets) {
    auto guard = WriteLock();
    // Update available descriptor sets in pool
    available_sets_ += count;

    // For each freed descriptor set, add its resources back into the pool as available and remove it from pool and device data
    for (uint32_t i = 0; i < count; ++i) {
        if (descriptor_sets[i] != VK_NULL_HANDLE) {
            auto iter = sets_.find(descriptor_sets[i]);
            assert(iter != sets_.end());
            auto *set_state = iter->second;
            uint32_t type_index = 0, descriptor_count = 0;
            for (uint32_t j = 0; j < set_state->GetBindingCount(); ++j) {
                type_index = static_cast<uint32_t>(set_state->GetTypeFromIndex(j));
                descriptor_count = set_state->GetDescriptorCountFromIndex(j);
                available_counts_[type_index] += descriptor_count;
            }
            dev_data_->Destroy<cvdescriptorset::DescriptorSet>(iter->first);
            sets_.erase(iter);
        }
    }
}

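// Destroy the state for every set allocated from this pool and restore the available set and
// per-type descriptor counts to their creation-time maximums (used when the pool is reset or
// destroyed).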
void DESCRIPTOR_POOL_STATE::Reset() {
    auto guard = WriteLock();
    // For every set allocated from this pool, destroy its cvdescriptorset::DescriptorSet state and drop it from sets_
    for (auto entry : sets_) {
        dev_data_->Destroy<cvdescriptorset::DescriptorSet>(entry.first);
    }
    sets_.clear();
    // Reset available count for each type and available sets for this pool
    available_counts_ = maxDescriptorTypeCount;
    available_sets_ = maxSets;
}

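// Returns true if any descriptor set allocated from this pool is still in use, e.g. referenced by
// a submitted command buffer that has not yet completed.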
bool DESCRIPTOR_POOL_STATE::InUse() const {
    auto guard = ReadLock();
    for (const auto &entry : sets_) {
        const auto *ds = entry.second;
        if (ds && ds->InUse()) {
            return true;
        }
    }
    return false;
}

void DESCRIPTOR_POOL_STATE::Destroy() {
    Reset();
    BASE_NODE::Destroy();
}

// ExtendedBinding collects a VkDescriptorSetLayoutBinding and any extended
// state that comes from a different array/structure so they can stay together
// while being sorted by binding number.
struct ExtendedBinding {
    ExtendedBinding(const VkDescriptorSetLayoutBinding *l, VkDescriptorBindingFlags f) : layout_binding(l), binding_flags(f) {}

    const VkDescriptorSetLayoutBinding *layout_binding;
    VkDescriptorBindingFlags binding_flags;
};

struct BindingNumCmp {
    bool operator()(const ExtendedBinding &a, const ExtendedBinding &b) const {
        return a.layout_binding->binding < b.layout_binding->binding;
    }
};

using DescriptorSet = cvdescriptorset::DescriptorSet;
using DescriptorSetLayout = cvdescriptorset::DescriptorSetLayout;
using DescriptorSetLayoutDef = cvdescriptorset::DescriptorSetLayoutDef;
using DescriptorSetLayoutId = cvdescriptorset::DescriptorSetLayoutId;

// Canonical dictionary of DescriptorSetLayoutDef (without any handle/device specific information)
cvdescriptorset::DescriptorSetLayoutDict descriptor_set_layout_dict;

DescriptorSetLayoutId GetCanonicalId(const VkDescriptorSetLayoutCreateInfo *p_create_info) {
    return descriptor_set_layout_dict.look_up(DescriptorSetLayoutDef(p_create_info));
}
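
// The dictionary de-duplicates layout definitions, so layouts created from identical create info
// share a single DescriptorSetLayoutDef. Illustrative sketch only (not part of the layer's API
// surface, and the assert is just to show the expected relationship):
//
//     VkDescriptorSetLayoutCreateInfo ci = /* same bindings both times */;
//     auto id_a = GetCanonicalId(&ci);
//     auto id_b = GetCanonicalId(&ci);
//     assert(id_a == id_b);  // identical definitions collapse to one canonical id
//
// This is what lets DescriptorSetLayout::IsCompatible() below reduce to a pointer comparison.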

// Construct DescriptorSetLayoutDef instance from given create info
// Proactively reserve and resize as possible, as the reallocation was visible in profiling
cvdescriptorset::DescriptorSetLayoutDef::DescriptorSetLayoutDef(const VkDescriptorSetLayoutCreateInfo *p_create_info)
    : flags_(p_create_info->flags), binding_count_(0), descriptor_count_(0), dynamic_descriptor_count_(0) {
    const auto *flags_create_info = LvlFindInChain<VkDescriptorSetLayoutBindingFlagsCreateInfo>(p_create_info->pNext);

    binding_type_stats_ = {0, 0};
    std::set<ExtendedBinding, BindingNumCmp> sorted_bindings;
    const uint32_t input_bindings_count = p_create_info->bindingCount;
    // Sort the input bindings in binding number order, eliminating duplicates
    for (uint32_t i = 0; i < input_bindings_count; i++) {
        VkDescriptorBindingFlags flags = 0;
        if (flags_create_info && flags_create_info->bindingCount == p_create_info->bindingCount) {
            flags = flags_create_info->pBindingFlags[i];
        }
        sorted_bindings.emplace(p_create_info->pBindings + i, flags);
    }

    const auto *mutable_descriptor_type_create_info = LvlFindInChain<VkMutableDescriptorTypeCreateInfoVALVE>(p_create_info->pNext);
    if (mutable_descriptor_type_create_info) {
        mutable_types_.resize(mutable_descriptor_type_create_info->mutableDescriptorTypeListCount);
        for (uint32_t i = 0; i < mutable_descriptor_type_create_info->mutableDescriptorTypeListCount; ++i) {
            const auto &list = mutable_descriptor_type_create_info->pMutableDescriptorTypeLists[i];
            mutable_types_[i].reserve(list.descriptorTypeCount);
            for (uint32_t j = 0; j < list.descriptorTypeCount; ++j) {
                mutable_types_[i].push_back(list.pDescriptorTypes[j]);
            }
            std::sort(mutable_types_[i].begin(), mutable_types_[i].end());
        }
    }

    // Store the create info in the sorted order from above
    uint32_t index = 0;
    binding_count_ = static_cast<uint32_t>(sorted_bindings.size());
    bindings_.reserve(binding_count_);
    binding_flags_.reserve(binding_count_);
    binding_to_index_map_.reserve(binding_count_);
    for (const auto &input_binding : sorted_bindings) {
        // Add to binding and map, s.t. it is robust to invalid duplication of binding_num
        const auto binding_num = input_binding.layout_binding->binding;
        binding_to_index_map_[binding_num] = index++;
        bindings_.emplace_back(input_binding.layout_binding);
        auto &binding_info = bindings_.back();
        binding_flags_.emplace_back(input_binding.binding_flags);

        descriptor_count_ += binding_info.descriptorCount;
        if (binding_info.descriptorCount > 0) {
            non_empty_bindings_.insert(binding_num);
        }

        if (IsDynamicDescriptor(binding_info.descriptorType)) {
            dynamic_descriptor_count_ += binding_info.descriptorCount;
        }

        // Get stats depending on descriptor type for caching later
        if (IsBufferDescriptor(binding_info.descriptorType)) {
            if (IsDynamicDescriptor(binding_info.descriptorType)) {
                binding_type_stats_.dynamic_buffer_count++;
            } else {
                binding_type_stats_.non_dynamic_buffer_count++;
            }
        }
    }
    assert(bindings_.size() == binding_count_);
    assert(binding_flags_.size() == binding_count_);
    uint32_t global_index = 0;
    global_index_range_.reserve(binding_count_);
    // Vector order is finalized so build vectors of descriptors and dynamic offsets by binding index
    for (uint32_t i = 0; i < binding_count_; ++i) {
        auto final_index = global_index + bindings_[i].descriptorCount;
        global_index_range_.emplace_back(global_index, final_index);
        global_index = final_index;
    }
}

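// Hash the handle-invariant state (flags, bindings, and binding flags) so DescriptorSetLayoutDef
// can serve as the key for the canonical dictionary above.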
size_t cvdescriptorset::DescriptorSetLayoutDef::hash() const {
    hash_util::HashCombiner hc;
    hc << flags_;
    hc.Combine(bindings_);
    hc.Combine(binding_flags_);
    return hc.Value();
}

// Return a valid index or "end", i.e. binding_count_
// The asserts in the "Get" functions are limited to cases where no valid answer (like null or 0) could be given
// Common code for all binding lookups.
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetIndexFromBinding(uint32_t binding) const {
    const auto &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.cend()) return bi_itr->second;
    return GetBindingCount();
}
VkDescriptorSetLayoutBinding const *cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorSetLayoutBindingPtrFromIndex(
    const uint32_t index) const {
    if (index >= bindings_.size()) return nullptr;
    return bindings_[index].ptr();
}
// Return descriptorCount for given index, 0 if index is unavailable
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorCountFromIndex(const uint32_t index) const {
    if (index >= bindings_.size()) return 0;
    return bindings_[index].descriptorCount;
}
// For the given index, return descriptorType
VkDescriptorType cvdescriptorset::DescriptorSetLayoutDef::GetTypeFromIndex(const uint32_t index) const {
    assert(index < bindings_.size());
    if (index < bindings_.size()) return bindings_[index].descriptorType;
    return VK_DESCRIPTOR_TYPE_MAX_ENUM;
}
// For the given index, return stageFlags
VkShaderStageFlags cvdescriptorset::DescriptorSetLayoutDef::GetStageFlagsFromIndex(const uint32_t index) const {
    assert(index < bindings_.size());
    if (index < bindings_.size()) return bindings_[index].stageFlags;
    return VkShaderStageFlags(0);
}
// Return binding flags for given index, 0 if index is unavailable
VkDescriptorBindingFlags cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorBindingFlagsFromIndex(const uint32_t index) const {
    if (index >= binding_flags_.size()) return 0;
    return binding_flags_[index];
}

const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromIndex(uint32_t index) const {
    const static IndexRange k_invalid_range = {0xFFFFFFFF, 0xFFFFFFFF};
    if (index >= binding_flags_.size()) return k_invalid_range;
    return global_index_range_[index];
}

// For the given binding, return the global index range (half open)
// As start and end are often needed in pairs, get both with a single lookup.
const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromBinding(
    const uint32_t binding) const {
    uint32_t index = GetIndexFromBinding(binding);
    return GetGlobalIndexRangeFromIndex(index);
}

// For given binding, return ptr to ImmutableSampler array
VkSampler const *cvdescriptorset::DescriptorSetLayoutDef::GetImmutableSamplerPtrFromBinding(const uint32_t binding) const {
    const auto &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.end()) {
        return bindings_[bi_itr->second].pImmutableSamplers;
    }
    return nullptr;
}
// Move to next valid binding having a non-zero binding count
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetNextValidBinding(const uint32_t binding) const {
    auto it = non_empty_bindings_.upper_bound(binding);
    assert(it != non_empty_bindings_.cend());
    if (it != non_empty_bindings_.cend()) return *it;
    return GetMaxBinding() + 1;
}
// For given index, return ptr to ImmutableSampler array
VkSampler const *cvdescriptorset::DescriptorSetLayoutDef::GetImmutableSamplerPtrFromIndex(const uint32_t index) const {
    if (index < bindings_.size()) {
        return bindings_[index].pImmutableSamplers;
    }
    return nullptr;
}

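// Return true if 'type' is one of the allowed types for the mutable descriptor binding at the
// given list index; the per-binding type lists were captured from
// VkMutableDescriptorTypeCreateInfoVALVE at layout creation.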
bool cvdescriptorset::DescriptorSetLayoutDef::IsTypeMutable(const VkDescriptorType type, uint32_t binding) const {
    if (binding < mutable_types_.size()) {
        if (mutable_types_[binding].size() > 0) {
            for (const auto mutable_type : mutable_types_[binding]) {
                if (type == mutable_type) {
                    return true;
                }
            }
            return false;
        }
    }
    // If mutableDescriptorTypeListCount is zero or if VkMutableDescriptorTypeCreateInfoVALVE structure is not included in the
    // pNext chain, the VkMutableDescriptorTypeListVALVE for each element is considered to be zero or NULL for each member.
    return false;
}

const std::vector<std::vector<VkDescriptorType>> &cvdescriptorset::DescriptorSetLayoutDef::GetMutableTypes() const {
    return mutable_types_;
}

const std::vector<VkDescriptorType> &cvdescriptorset::DescriptorSetLayoutDef::GetMutableTypes(uint32_t binding) const {
    if (binding >= mutable_types_.size()) {
        static const std::vector<VkDescriptorType> empty = {};
        return empty;
    }
    return mutable_types_[binding];
}

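// Layout definitions are canonicalized in descriptor_set_layout_dict, so two layouts built from
// identical create info share the same DescriptorSetLayoutDef and compatibility checking reduces
// to a pointer comparison.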
// If our layout is compatible with rh_ds_layout, return true.
bool cvdescriptorset::DescriptorSetLayout::IsCompatible(DescriptorSetLayout const *rh_ds_layout) const {
    bool compatible = (this == rh_ds_layout) || (GetLayoutDef() == rh_ds_layout->GetLayoutDef());
    return compatible;
}

// TODO: Find a way to add smarts to the autogenerated version of this
static std::string smart_string_VkShaderStageFlags(VkShaderStageFlags stage_flags) {
    if (stage_flags == VK_SHADER_STAGE_ALL) {
        return string_VkShaderStageFlagBits(VK_SHADER_STAGE_ALL);
    }

    return string_VkShaderStageFlags(stage_flags);
}

// If our layout is compatible with bound_dsl, return true,
// else return false and fill in error_msg with a description of what causes the incompatibility
bool cvdescriptorset::VerifySetLayoutCompatibility(const debug_report_data *report_data, DescriptorSetLayout const *layout_dsl,
                                                   DescriptorSetLayout const *bound_dsl, std::string *error_msg) {
    // Short circuit the detailed check.
    if (layout_dsl->IsCompatible(bound_dsl)) return true;

    // Do a detailed compatibility check of this lhs def (referenced by layout_dsl), vs. the rhs (layout and def)
    // Should only be run if trivial accept has failed, and in that context should return false.
    VkDescriptorSetLayout layout_dsl_handle = layout_dsl->GetDescriptorSetLayout();
    VkDescriptorSetLayout bound_dsl_handle = bound_dsl->GetDescriptorSetLayout();
    DescriptorSetLayoutDef const *layout_ds_layout_def = layout_dsl->GetLayoutDef();
    DescriptorSetLayoutDef const *bound_ds_layout_def = bound_dsl->GetLayoutDef();

    // Check descriptor counts
    const auto bound_total_count = bound_ds_layout_def->GetTotalDescriptorCount();
    if (layout_ds_layout_def->GetTotalDescriptorCount() != bound_ds_layout_def->GetTotalDescriptorCount()) {
        std::stringstream error_str;
        error_str << report_data->FormatHandle(layout_dsl_handle) << " from pipeline layout has "
                  << layout_ds_layout_def->GetTotalDescriptorCount() << " total descriptors, but "
                  << report_data->FormatHandle(bound_dsl_handle) << ", which is bound, has " << bound_total_count
                  << " total descriptors.";
        *error_msg = error_str.str();
        return false;  // trivial fail case
    }

    // Descriptor counts match so need to go through bindings one-by-one
    //  and verify that type and stageFlags match
    for (const auto &layout_binding : layout_ds_layout_def->GetBindings()) {
        // TODO : Do we also need to check immutable samplers?
        const auto bound_binding = bound_ds_layout_def->GetBindingInfoFromBinding(layout_binding.binding);
        if (layout_binding.descriptorCount != bound_binding->descriptorCount) {
            std::stringstream error_str;
            error_str << "Binding " << layout_binding.binding << " for " << report_data->FormatHandle(layout_dsl_handle)
                      << " from pipeline layout has a descriptorCount of " << layout_binding.descriptorCount << " but binding "
                      << layout_binding.binding << " for " << report_data->FormatHandle(bound_dsl_handle)
                      << ", which is bound, has a descriptorCount of " << bound_binding->descriptorCount;
            *error_msg = error_str.str();
            return false;
        } else if (layout_binding.descriptorType != bound_binding->descriptorType) {
            std::stringstream error_str;
            error_str << "Binding " << layout_binding.binding << " for " << report_data->FormatHandle(layout_dsl_handle)
                      << " from pipeline layout is type '" << string_VkDescriptorType(layout_binding.descriptorType)
                      << "' but binding " << layout_binding.binding << " for " << report_data->FormatHandle(bound_dsl_handle)
                      << ", which is bound, is type '" << string_VkDescriptorType(bound_binding->descriptorType) << "'";
            *error_msg = error_str.str();
            return false;
        } else if (layout_binding.stageFlags != bound_binding->stageFlags) {
            std::stringstream error_str;
            error_str << "Binding " << layout_binding.binding << " for " << report_data->FormatHandle(layout_dsl_handle)
                      << " from pipeline layout has stageFlags " << smart_string_VkShaderStageFlags(layout_binding.stageFlags)
                      << " but binding " << layout_binding.binding << " for " << report_data->FormatHandle(bound_dsl_handle)
                      << ", which is bound, has stageFlags " << smart_string_VkShaderStageFlags(bound_binding->stageFlags);
            *error_msg = error_str.str();
            return false;
        }
    }

    const auto &ds_layout_flags = layout_ds_layout_def->GetBindingFlags();
    const auto &bound_layout_flags = bound_ds_layout_def->GetBindingFlags();
    if (bound_layout_flags != ds_layout_flags) {
        std::stringstream error_str;
        assert(ds_layout_flags.size() == bound_layout_flags.size());
        size_t i;
        for (i = 0; i < ds_layout_flags.size(); i++) {
            if (ds_layout_flags[i] != bound_layout_flags[i]) break;
        }
        error_str << report_data->FormatHandle(layout_dsl_handle)
                  << " from pipeline layout does not have the same binding flags at binding " << i << " ( "
                  << string_VkDescriptorBindingFlagsEXT(ds_layout_flags[i]) << " ) as "
                  << report_data->FormatHandle(bound_dsl_handle) << " ( "
                  << string_VkDescriptorBindingFlagsEXT(bound_layout_flags[i]) << " ), which is bound";
        *error_msg = error_str.str();
        return false;
    }

    // No detailed check should succeed if the trivial check failed -- or the dictionary has failed somehow.
    bool compatible = true;
    assert(!compatible);
    return compatible;
}

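// Used when a descriptor update spills past the end of one binding into the next: consecutive
// bindings may only be treated as one contiguous range if they agree on descriptor type, stage
// flags, immutable sampler usage, and binding flags, which is exactly what this checks.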
bool cvdescriptorset::DescriptorSetLayoutDef::IsNextBindingConsistent(const uint32_t binding) const {
    if (!binding_to_index_map_.count(binding + 1)) return false;
    auto const &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.end()) {
        const auto &next_bi_itr = binding_to_index_map_.find(binding + 1);
        if (next_bi_itr != binding_to_index_map_.end()) {
            auto type = bindings_[bi_itr->second].descriptorType;
            auto stage_flags = bindings_[bi_itr->second].stageFlags;
            auto immut_samp = bindings_[bi_itr->second].pImmutableSamplers ? true : false;
            auto flags = binding_flags_[bi_itr->second];
            if ((type != bindings_[next_bi_itr->second].descriptorType) ||
                (stage_flags != bindings_[next_bi_itr->second].stageFlags) ||
                (immut_samp != (bindings_[next_bi_itr->second].pImmutableSamplers ? true : false)) ||
                (flags != binding_flags_[next_bi_itr->second])) {
                return false;
            }
            return true;
        }
    }
    return false;
}

// The DescriptorSetLayout stores the per handle data for a descriptor set layout, and references the common definition for the
// handle invariant portion
cvdescriptorset::DescriptorSetLayout::DescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo *p_create_info,
                                                          const VkDescriptorSetLayout layout)
    : BASE_NODE(layout, kVulkanObjectTypeDescriptorSetLayout), layout_id_(GetCanonicalId(p_create_info)) {}

// Validate descriptor set layout create info
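// Checks cover the push descriptor and update-after-bind layout flags, per-binding descriptor
// type restrictions, inline uniform block limits, immutable sampler border colors, mutable
// descriptor constraints, and consistency with VkDescriptorSetLayoutBindingFlagsCreateInfo.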
bool cvdescriptorset::ValidateDescriptorSetLayoutCreateInfo(
    const ValidationObject *val_obj, const VkDescriptorSetLayoutCreateInfo *create_info, const bool push_descriptor_ext,
    const uint32_t max_push_descriptors, const bool descriptor_indexing_ext,
    const VkPhysicalDeviceVulkan12Features *core12_features,
    const VkPhysicalDeviceVulkan13Features *core13_features,
    const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *inline_uniform_block_props,
    const VkPhysicalDeviceAccelerationStructureFeaturesKHR *acceleration_structure_features,
    const DeviceExtensions *device_extensions) {
    bool skip = false;
    layer_data::unordered_set<uint32_t> bindings;
    uint64_t total_descriptors = 0;

    const auto *flags_create_info = LvlFindInChain<VkDescriptorSetLayoutBindingFlagsCreateInfo>(create_info->pNext);

    const bool push_descriptor_set = !!(create_info->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
    if (push_descriptor_set && !push_descriptor_ext) {
        skip |= val_obj->LogError(
            val_obj->device, kVUID_Core_DrawState_ExtensionNotEnabled,
            "vkCreateDescriptorSetLayout(): Attempted to use %s in %s but its required extension %s has not been enabled.\n",
            "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR", "VkDescriptorSetLayoutCreateInfo::flags",
            VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    }

    const bool update_after_bind_set = !!(create_info->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT);
    if (update_after_bind_set && !descriptor_indexing_ext) {
        skip |= val_obj->LogError(
            val_obj->device, kVUID_Core_DrawState_ExtensionNotEnabled,
            "vkCreateDescriptorSetLayout(): Attempted to use %s in %s but its required extension %s has not been enabled.\n",
            "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT", "VkDescriptorSetLayoutCreateInfo::flags",
            VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
    }

    auto valid_type = [push_descriptor_set](const VkDescriptorType type) {
        return !push_descriptor_set ||
               ((type != VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) && (type != VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) &&
                (type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT));
    };

    uint32_t max_binding = 0;

    uint32_t update_after_bind = create_info->bindingCount;
    uint32_t uniform_buffer_dynamic = create_info->bindingCount;
    uint32_t storage_buffer_dynamic = create_info->bindingCount;

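    // Walk every binding once: reject duplicate binding numbers, types that are illegal for push
    // descriptors, malformed inline uniform blocks, immutable samplers with custom border colors,
    // and immutable samplers on VK_DESCRIPTOR_TYPE_MUTABLE_VALVE bindings, while accumulating the
    // total descriptor count.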
    for (uint32_t i = 0; i < create_info->bindingCount; ++i) {
        const auto &binding_info = create_info->pBindings[i];
        max_binding = std::max(max_binding, binding_info.binding);

        if (!bindings.insert(binding_info.binding).second) {
            skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutCreateInfo-binding-00279",
                                      "vkCreateDescriptorSetLayout(): pBindings[%u] has duplicated binding number (%u).", i,
                                      binding_info.binding);
        }
        if (!valid_type(binding_info.descriptorType)) {
            skip |= val_obj->LogError(val_obj->device,
                                      (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
                                          ? "VUID-VkDescriptorSetLayoutCreateInfo-flags-02208"
                                          : "VUID-VkDescriptorSetLayoutCreateInfo-flags-00280",
                                      "vkCreateDescriptorSetLayout(): pBindings[%u] has invalid type %s for push descriptors.", i,
                                      string_VkDescriptorType(binding_info.descriptorType));
        }

        if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
            if (!core13_features->inlineUniformBlock) {
                skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBinding-descriptorType-04604",
                                          "vkCreateDescriptorSetLayout(): pBindings[%u] is creating VkDescriptorSetLayout with "
                                          "descriptor type VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT "
                                          "but the inlineUniformBlock feature is not enabled",
                                          i);
            } else {
                if ((binding_info.descriptorCount % 4) != 0) {
                    skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBinding-descriptorType-02209",
                                              "vkCreateDescriptorSetLayout(): pBindings[%u] has descriptorCount (%" PRIu32
                                              ") but it must be a multiple of 4",
                                              i, binding_info.descriptorCount);
                }
                if (binding_info.descriptorCount > inline_uniform_block_props->maxInlineUniformBlockSize) {
                    skip |=
                        val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBinding-descriptorType-02210",
                                          "vkCreateDescriptorSetLayout(): pBindings[%u] has descriptorCount (%" PRIu32
                                          ") but it must be less than or equal to maxInlineUniformBlockSize (%u)",
                                          i, binding_info.descriptorCount, inline_uniform_block_props->maxInlineUniformBlockSize);
                }
            }
        } else if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
            uniform_buffer_dynamic = i;
        } else if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
            storage_buffer_dynamic = i;
        }

        if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER ||
             binding_info.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
            binding_info.pImmutableSamplers && IsExtEnabled(device_extensions->vk_ext_custom_border_color)) {
            const CoreChecks *core_checks = reinterpret_cast<const CoreChecks *>(val_obj);
            for (uint32_t j = 0; j < binding_info.descriptorCount; j++) {
                auto sampler_state = core_checks->Get<SAMPLER_STATE>(binding_info.pImmutableSamplers[j]);
                if (sampler_state && (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
                                      sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT)) {
                    skip |= val_obj->LogError(
                        val_obj->device, "VUID-VkDescriptorSetLayoutBinding-pImmutableSamplers-04009",
                        "vkCreateDescriptorSetLayout(): pBindings[%u].pImmutableSamplers[%u] has immutable VkSampler %s"
                        " that uses a custom border color",
                        i, j, val_obj->report_data->FormatHandle(binding_info.pImmutableSamplers[j]).c_str());
                }
            }
        }

        if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE && binding_info.pImmutableSamplers != nullptr) {
            skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBinding-descriptorType-04605",
                                      "vkCreateDescriptorSetLayout(): pBindings[%u] has descriptorType "
                                      "VK_DESCRIPTOR_TYPE_MUTABLE_VALVE but pImmutableSamplers is not NULL.",
                                      i);
        }

        total_descriptors += binding_info.descriptorCount;
    }

    if (flags_create_info) {
        if (flags_create_info->bindingCount != 0 && flags_create_info->bindingCount != create_info->bindingCount) {
            skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-bindingCount-03002",
                                      "vkCreateDescriptorSetLayout(): VkDescriptorSetLayoutCreateInfo::bindingCount (%d) != "
                                      "VkDescriptorSetLayoutBindingFlagsCreateInfo::bindingCount (%d)",
                                      create_info->bindingCount, flags_create_info->bindingCount);
        }

        if (flags_create_info->bindingCount == create_info->bindingCount) {
            for (uint32_t i = 0; i < create_info->bindingCount; ++i) {
                const auto &binding_info = create_info->pBindings[i];

                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT) {
                    update_after_bind = i;
                    if (!update_after_bind_set) {
                        skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutCreateInfo-flags-03000",
                                                  "vkCreateDescriptorSetLayout(): pBindings[%u] does not have "
                                                  "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT.",
                                                  i);
                    }

                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER &&
                        !core12_features->descriptorBindingUniformBufferUpdateAfterBind) {
                        skip |= val_obj->LogError(
                            val_obj->device,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-"
                            "descriptorBindingUniformBufferUpdateAfterBind-03005",
                            "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT "
                            "for %s since descriptorBindingUniformBufferUpdateAfterBind is not enabled.",
                            i, string_VkDescriptorType(binding_info.descriptorType));
                    }
                    if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER ||
                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) &&
                        !core12_features->descriptorBindingSampledImageUpdateAfterBind) {
                        skip |= val_obj->LogError(
                            val_obj->device,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-"
                            "descriptorBindingSampledImageUpdateAfterBind-03006",
                            "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT "
                            "for %s since descriptorBindingSampledImageUpdateAfterBind is not enabled.",
                            i, string_VkDescriptorType(binding_info.descriptorType));
                    }
                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE &&
                        !core12_features->descriptorBindingStorageImageUpdateAfterBind) {
                        skip |= val_obj->LogError(
                            val_obj->device,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-"
                            "descriptorBindingStorageImageUpdateAfterBind-03007",
                            "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT "
                            "for %s since descriptorBindingStorageImageUpdateAfterBind is not enabled.",
                            i, string_VkDescriptorType(binding_info.descriptorType));
                    }
                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER &&
                        !core12_features->descriptorBindingStorageBufferUpdateAfterBind) {
                        skip |= val_obj->LogError(
                            val_obj->device,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-"
                            "descriptorBindingStorageBufferUpdateAfterBind-03008",
                            "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT "
                            "for %s since descriptorBindingStorageBufferUpdateAfterBind is not enabled.",
                            i, string_VkDescriptorType(binding_info.descriptorType));
                    }
                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER &&
                        !core12_features->descriptorBindingUniformTexelBufferUpdateAfterBind) {
                        skip |= val_obj->LogError(
                            val_obj->device,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-"
                            "descriptorBindingUniformTexelBufferUpdateAfterBind-03009",
                            "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT "
                            "for %s since descriptorBindingUniformTexelBufferUpdateAfterBind is not enabled.",
                            i, string_VkDescriptorType(binding_info.descriptorType));
                    }
                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER &&
                        !core12_features->descriptorBindingStorageTexelBufferUpdateAfterBind) {
                        skip |= val_obj->LogError(
                            val_obj->device,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-"
                            "descriptorBindingStorageTexelBufferUpdateAfterBind-03010",
                            "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT "
                            "for %s since descriptorBindingStorageTexelBufferUpdateAfterBind is not enabled.",
                            i, string_VkDescriptorType(binding_info.descriptorType));
                    }
                    if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT ||
                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
                        skip |= val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-None-03011",
                                                  "vkCreateDescriptorSetLayout(): pBindings[%u] can't have "
                                                  "VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT for %s.",
                                                  i, string_VkDescriptorType(binding_info.descriptorType));
                    }

                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT &&
                        !core13_features->descriptorBindingInlineUniformBlockUpdateAfterBind) {
                        skip |= val_obj->LogError(
                            val_obj->device,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-"
                            "descriptorBindingInlineUniformBlockUpdateAfterBind-02211",
                            "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT "
                            "for %s since descriptorBindingInlineUniformBlockUpdateAfterBind is not enabled.",
                            i, string_VkDescriptorType(binding_info.descriptorType));
                    }
                    if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR ||
                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV) &&
                        !acceleration_structure_features->descriptorBindingAccelerationStructureUpdateAfterBind) {
                        skip |= val_obj->LogError(val_obj->device,
                                                  "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-"
                                                  "descriptorBindingAccelerationStructureUpdateAfterBind-03570",
                                                  "vkCreateDescriptorSetLayout(): pBindings[%" PRIu32
                                                  "] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT "
                                                  "for %s if "
                                                  "VkPhysicalDeviceAccelerationStructureFeaturesKHR::"
                                                  "descriptorBindingAccelerationStructureUpdateAfterBind is not enabled.",
                                                  i, string_VkDescriptorType(binding_info.descriptorType));
                    }
                }

                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT) {
                    if (!core12_features->descriptorBindingUpdateUnusedWhilePending) {
                        skip |= val_obj->LogError(
                            val_obj->device,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-descriptorBindingUpdateUnusedWhilePending-03012",
                            "vkCreateDescriptorSetLayout(): pBindings[%u] can't have "
                            "VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT for %s since "
                            "descriptorBindingUpdateUnusedWhilePending is not enabled.",
                            i, string_VkDescriptorType(binding_info.descriptorType));
                    }
                }

                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT) {
                    if (!core12_features->descriptorBindingPartiallyBound) {
                        skip |= val_obj->LogError(
                            val_obj->device,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-descriptorBindingPartiallyBound-03013",
                            "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT for "
                            "%s since descriptorBindingPartiallyBound is not enabled.",
                            i, string_VkDescriptorType(binding_info.descriptorType));
                    }
                }

                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT) {
                    if (binding_info.binding != max_binding) {
                        skip |= val_obj->LogError(
                            val_obj->device, "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-pBindingFlags-03004",
                    "vkCreateDescriptorSetLayout(): pBindings[%u] has VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT "
                    "but its binding number (%u) is not the largest binding number in pBindings.",
                    i, binding_info.binding);
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 736 | } |
| 737 | |
Piers Daniell | 41b8c5d | 2020-01-10 15:42:00 -0700 | [diff] [blame] | 738 | if (!core12_features->descriptorBindingVariableDescriptorCount) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 739 | skip |= val_obj->LogError( |
| 740 | val_obj->device, |
Mike Schuchardt | 65847d9 | 2019-12-20 13:50:47 -0800 | [diff] [blame] | 741 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-descriptorBindingVariableDescriptorCount-03014", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 742 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have " |
| 743 | "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT for %s since " |
| 744 | "descriptorBindingVariableDescriptorCount is not enabled.", |
| 745 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 746 | } |
sfricke-samsung | 4ba7d6e | 2021-03-06 20:56:35 -0800 | [diff] [blame] | 747 | if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) || |
| 748 | (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 749 | skip |= val_obj->LogError(val_obj->device, |
| 750 | "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-pBindingFlags-03015", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 751 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have " |
| 752 | "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT for %s.", |
| 753 | i, string_VkDescriptorType(binding_info.descriptorType)); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 754 | } |
| 755 | } |
| 756 | |
| 757 | if (push_descriptor_set && |
| 758 | (flags_create_info->pBindingFlags[i] & |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 759 | (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | |
| 760 | VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT))) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 761 | skip |= val_obj->LogError( |
| 762 | val_obj->device, "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-flags-03003", |
| 763 | "vkCreateDescriptorSetLayout(): pBindings[%u] can't have VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, " |
| 764 | "VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, or " |
                "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT when the set layout is created with "
                "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR.",
| 767 | i); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 768 | } |
| 769 | } |
| 770 | } |
| 771 | } |
| 772 | |
ziga-lunarg | 185ef28 | 2021-07-19 13:13:28 +0200 | [diff] [blame] | 773 | if (update_after_bind < create_info->bindingCount) { |
| 774 | if (uniform_buffer_dynamic < create_info->bindingCount) { |
| 775 | skip |= |
| 776 | val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutCreateInfo-descriptorType-03001", |
                              "vkCreateDescriptorSetLayout(): binding (%" PRIu32
                              ") has VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT "
                              "flag, but binding (%" PRIu32 ") has descriptor type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC.",
| 780 | update_after_bind, uniform_buffer_dynamic); |
| 781 | } |
| 782 | if (storage_buffer_dynamic < create_info->bindingCount) { |
| 783 | skip |= |
| 784 | val_obj->LogError(val_obj->device, "VUID-VkDescriptorSetLayoutCreateInfo-descriptorType-03001", |
                              "vkCreateDescriptorSetLayout(): binding (%" PRIu32
                              ") has VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT "
                              "flag, but binding (%" PRIu32 ") has descriptor type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC.",
| 788 | update_after_bind, storage_buffer_dynamic); |
| 789 | } |
| 790 | } |
| 791 | |
John Zulauf | 0fdeab3 | 2018-01-23 11:27:35 -0700 | [diff] [blame] | 792 | if ((push_descriptor_set) && (total_descriptors > max_push_descriptors)) { |
| 793 | const char *undefined = push_descriptor_ext ? "" : " -- undefined"; |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 794 | skip |= val_obj->LogError( |
| 795 | val_obj->device, "VUID-VkDescriptorSetLayoutCreateInfo-flags-00281", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 796 | "vkCreateDescriptorSetLayout(): for push descriptor, total descriptor count in layout (%" PRIu64 |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 797 | ") must not be greater than VkPhysicalDevicePushDescriptorPropertiesKHR::maxPushDescriptors (%" PRIu32 "%s).", |
| 798 | total_descriptors, max_push_descriptors, undefined); |
John Zulauf | 0fdeab3 | 2018-01-23 11:27:35 -0700 | [diff] [blame] | 799 | } |
| 800 | |
Tobin Ehlis | 154c269 | 2016-10-25 09:36:53 -0600 | [diff] [blame] | 801 | return skip; |
| 802 | } |
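// Illustrative, application-side sketch (not part of the validation layer) of a descriptor set
// layout that satisfies the binding-flag rules checked above. Assumptions: `device` and `layout`
// exist in the caller, and the relevant features (descriptorBindingSampledImageUpdateAfterBind,
// descriptorBindingPartiallyBound, descriptorBindingVariableDescriptorCount) are enabled.
//
//     VkDescriptorSetLayoutBinding bindings[2] = {
//         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
//         // The variable-count binding must be the highest-numbered binding in the layout.
//         {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1024, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
//     };
//     const VkDescriptorBindingFlags binding_flags[2] = {
//         0, VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT | VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT |
//                VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT};
//     VkDescriptorSetLayoutBindingFlagsCreateInfo flags_info = {
//         VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, nullptr, 2, binding_flags};
//     VkDescriptorSetLayoutCreateInfo layout_ci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, &flags_info,
//                                                  VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, 2, bindings};
//     vkCreateDescriptorSetLayout(device, &layout_ci, nullptr, &layout);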
| 803 | |
Mark Lobodzinski | e12b6e3 | 2020-06-29 11:44:15 -0600 | [diff] [blame] | 804 | void cvdescriptorset::AllocateDescriptorSetsData::Init(uint32_t count) { |
| 805 | layout_nodes.resize(count); |
Mark Lobodzinski | e12b6e3 | 2020-06-29 11:44:15 -0600 | [diff] [blame] | 806 | } |
Tobin Ehlis | 68d0adf | 2016-06-01 11:33:50 -0600 | [diff] [blame] | 807 | |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 808 | cvdescriptorset::DescriptorSet::DescriptorSet(const VkDescriptorSet set, DESCRIPTOR_POOL_STATE *pool_state, |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 809 | const std::shared_ptr<DescriptorSetLayout const> &layout, uint32_t variable_count, |
John Zulauf | d2c3dae | 2019-12-12 11:02:17 -0700 | [diff] [blame] | 810 | const cvdescriptorset::DescriptorSet::StateTracker *state_data) |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 811 | : BASE_NODE(set, kVulkanObjectTypeDescriptorSet), |
| 812 | some_update_(false), |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 813 | pool_state_(pool_state), |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 814 | layout_(layout), |
John Zulauf | d2c3dae | 2019-12-12 11:02:17 -0700 | [diff] [blame] | 815 | state_data_(state_data), |
Jeff Bolz | dd4cfa1 | 2019-08-11 20:57:51 -0500 | [diff] [blame] | 816 | variable_count_(variable_count), |
| 817 | change_count_(0) { |
    // For each binding, create default descriptors of the given type
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 819 | descriptors_.reserve(layout_->GetTotalDescriptorCount()); |
| 820 | descriptor_store_.resize(layout_->GetTotalDescriptorCount()); |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 821 | auto free_descriptor = descriptor_store_.data(); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 822 | for (uint32_t i = 0; i < layout_->GetBindingCount(); ++i) { |
| 823 | auto type = layout_->GetTypeFromIndex(i); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 824 | switch (type) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 825 | case VK_DESCRIPTOR_TYPE_SAMPLER: { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 826 | auto immut_sampler = layout_->GetImmutableSamplerPtrFromIndex(i); |
| 827 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
Tobin Ehlis | 082c751 | 2017-05-08 11:24:57 -0600 | [diff] [blame] | 828 | if (immut_sampler) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 829 | descriptors_.emplace_back(new ((free_descriptor++)->Sampler()) |
| 830 | SamplerDescriptor(state_data, immut_sampler + di)); |
Tobin Ehlis | 082c751 | 2017-05-08 11:24:57 -0600 | [diff] [blame] | 831 | some_update_ = true; // Immutable samplers are updated at creation |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 832 | } else { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 833 | descriptors_.emplace_back(new ((free_descriptor++)->Sampler()) SamplerDescriptor(state_data, nullptr)); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 834 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 835 | } |
| 836 | break; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 837 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 838 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 839 | auto immut = layout_->GetImmutableSamplerPtrFromIndex(i); |
| 840 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
Tobin Ehlis | 082c751 | 2017-05-08 11:24:57 -0600 | [diff] [blame] | 841 | if (immut) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 842 | descriptors_.emplace_back(new ((free_descriptor++)->ImageSampler()) |
| 843 | ImageSamplerDescriptor(state_data, immut + di)); |
Tobin Ehlis | 082c751 | 2017-05-08 11:24:57 -0600 | [diff] [blame] | 844 | some_update_ = true; // Immutable samplers are updated at creation |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 845 | } else { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 846 | descriptors_.emplace_back(new ((free_descriptor++)->ImageSampler()) |
| 847 | ImageSamplerDescriptor(state_data, nullptr)); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 848 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 849 | } |
| 850 | break; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 851 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 852 | // ImageDescriptors |
| 853 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
| 854 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: |
| 855 | case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 856 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 857 | descriptors_.emplace_back(new ((free_descriptor++)->Image()) ImageDescriptor(type)); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 858 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 859 | break; |
| 860 | case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: |
| 861 | case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 862 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 863 | descriptors_.emplace_back(new ((free_descriptor++)->Texel()) TexelDescriptor(type)); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 864 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 865 | break; |
sfricke-samsung | 4ca3565 | 2021-03-05 02:22:10 -0800 | [diff] [blame] | 866 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: |
| 867 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: |
| 868 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 869 | descriptors_.emplace_back(new ((free_descriptor++)->Buffer()) BufferDescriptor(type)); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 870 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 871 | break; |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 872 | case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 873 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 874 | descriptors_.emplace_back(new ((free_descriptor++)->InlineUniform()) InlineUniformDescriptor(type)); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 875 | } |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 876 | break; |
Eric Werness | 30127fd | 2018-10-31 21:01:03 -0700 | [diff] [blame] | 877 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV: |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 878 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 879 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
John Zulauf | e4850d4 | 2019-12-30 16:10:55 -0700 | [diff] [blame] | 880 | descriptors_.emplace_back(new ((free_descriptor++)->AccelerationStructure()) |
| 881 | AccelerationStructureDescriptor(type)); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 882 | } |
Jeff Bolz | fbe5158 | 2018-09-13 10:01:35 -0500 | [diff] [blame] | 883 | break; |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 884 | case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE: |
| 885 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
ziga-lunarg | e5d2854 | 2021-10-24 21:14:25 +0200 | [diff] [blame] | 886 | descriptors_.emplace_back(new ((free_descriptor++)->Mutable()) MutableDescriptor()); |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 887 | } |
| 888 | break; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 889 | default: |
sfricke-samsung | 60f29ec | 2021-03-10 20:37:25 -0800 | [diff] [blame] | 890 | if (IsDynamicDescriptor(type) && IsBufferDescriptor(type)) { |
| 891 | for (uint32_t di = 0; di < layout_->GetDescriptorCountFromIndex(i); ++di) { |
| 892 | dynamic_offset_idx_to_descriptor_list_.push_back(descriptors_.size()); |
| 893 | descriptors_.emplace_back(new ((free_descriptor++)->Buffer()) BufferDescriptor(type)); |
| 894 | } |
| 895 | } else { |
| 896 | assert(0); // Bad descriptor type specified |
| 897 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 898 | break; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 899 | } |
| 900 | } |
| 901 | } |
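// For example (illustrative): a layout with {binding 0: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, count 2}
// and {binding 1: VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, count 1, immutable sampler} yields
// descriptors_ = [BufferDescriptor, BufferDescriptor, ImageSamplerDescriptor], with some_update_
// already true because the immutable sampler counts as an update at creation.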
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 902 | |
Jeremy Gebben | 610d3a6 | 2022-01-01 12:53:17 -0700 | [diff] [blame] | 903 | void cvdescriptorset::DescriptorSet::LinkChildNodes() { |
| 904 | // Connect child node(s), which cannot safely be done in the constructor. |
| 905 | for (auto &desc : descriptors_) { |
| 906 | desc->AddParent(this); |
| 907 | } |
| 908 | } |
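// Minimal sketch of the intended two-phase setup (caller-side; the exact call sites live in the
// state tracker and may differ from this sketch): the set is fully constructed before
// LinkChildNodes() runs, so Descriptor::AddParent() never observes a partially-constructed set.
//
//     auto ds = std::make_shared<cvdescriptorset::DescriptorSet>(handle, pool_state, layout, variable_count, this);
//     ds->LinkChildNodes();  // safe: *ds is complete at this point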
| 909 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 910 | void cvdescriptorset::DescriptorSet::Destroy() { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 911 | for (auto &desc: descriptors_) { |
| 912 | desc->RemoveParent(this); |
| 913 | } |
| 914 | BASE_NODE::Destroy(); |
| 915 | } |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 916 | |
Jeremy Gebben | 90ce416 | 2021-08-25 14:23:07 -0600 | [diff] [blame] | 917 | static std::string StringDescriptorReqViewType(DescriptorReqFlags req) { |
Chris Forbes | 6e58ebd | 2016-08-31 12:58:14 -0700 | [diff] [blame] | 918 | std::string result(""); |
Mark Lobodzinski | 29f451a | 2020-02-10 16:15:30 -0700 | [diff] [blame] | 919 | for (unsigned i = 0; i <= VK_IMAGE_VIEW_TYPE_CUBE_ARRAY; i++) { |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 920 | if (req & (1 << i)) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 921 | if (result.size()) result += ", "; |
Chris Forbes | 6e58ebd | 2016-08-31 12:58:14 -0700 | [diff] [blame] | 922 | result += string_VkImageViewType(VkImageViewType(i)); |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 923 | } |
| 924 | } |
| 925 | |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 926 | if (!result.size()) result = "(none)"; |
Chris Forbes | 6e58ebd | 2016-08-31 12:58:14 -0700 | [diff] [blame] | 927 | |
| 928 | return result; |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 929 | } |
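// Worked example (assuming the DESCRIPTOR_REQ_* view-type bits follow 1 << VK_IMAGE_VIEW_TYPE_*):
// a requirement mask with the 2D and CUBE bits set yields "VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_VIEW_TYPE_CUBE",
// while a mask with no view-type bits yields "(none)".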
| 930 | |
Jeremy Gebben | 90ce416 | 2021-08-25 14:23:07 -0600 | [diff] [blame] | 931 | static char const *StringDescriptorReqComponentType(DescriptorReqFlags req) { |
Chris Forbes | da01e8d | 2018-08-27 15:36:57 -0700 | [diff] [blame] | 932 | if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_SINT) return "SINT"; |
| 933 | if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_UINT) return "UINT"; |
| 934 | if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT) return "FLOAT"; |
| 935 | return "(none)"; |
| 936 | } |
| 937 | |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 938 | unsigned DescriptorRequirementsBitsFromFormat(VkFormat fmt) { |
sfricke-samsung | e308629 | 2021-11-18 23:02:35 -0800 | [diff] [blame] | 939 | if (FormatIsSINT(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_SINT; |
| 940 | if (FormatIsUINT(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_UINT; |
    // Formats such as VK_FORMAT_D16_UNORM_S8_UINT contain both a depth (float) and a stencil (uint) component
Chris Forbes | da01e8d | 2018-08-27 15:36:57 -0700 | [diff] [blame] | 942 | if (FormatIsDepthAndStencil(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT | DESCRIPTOR_REQ_COMPONENT_TYPE_UINT; |
| 943 | if (fmt == VK_FORMAT_UNDEFINED) return 0; |
| 944 | // everything else -- UNORM/SNORM/FLOAT/USCALED/SSCALED is all float in the shader. |
| 945 | return DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT; |
| 946 | } |
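// Worked examples of the mapping above (illustrative): VK_FORMAT_R8G8B8A8_UNORM and VK_FORMAT_R32_SFLOAT
// map to DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT, VK_FORMAT_R32_UINT maps to DESCRIPTOR_REQ_COMPONENT_TYPE_UINT,
// and a combined format such as VK_FORMAT_D16_UNORM_S8_UINT maps to
// DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT | DESCRIPTOR_REQ_COMPONENT_TYPE_UINT (depth sampled as float,
// stencil as uint).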
| 947 | |
Tobin Ehlis | 3066db6 | 2016-08-22 08:12:23 -0600 | [diff] [blame] | 948 | // Validate that the state of this set is appropriate for the given bindings and dynamic_offsets at Draw time |
// This includes validating that all descriptors in the given bindings have been updated,
// that any buffers they reference are valid, and that any dynamic offsets are within the bounds of their buffers.
// Returns true if a validation error was logged, false if the state is acceptable.
locke-lunarg | b8d7a7a | 2020-10-25 16:01:52 -0600 | [diff] [blame] | 952 | bool CoreChecks::ValidateDrawState(const DescriptorSet *descriptor_set, const BindingReqMap &bindings, |
| 953 | const std::vector<uint32_t> &dynamic_offsets, const CMD_BUFFER_STATE *cb_node, |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 954 | const std::vector<IMAGE_VIEW_STATE *> *attachments, const std::vector<SUBPASS_INFO> *subpasses, |
locke-lunarg | fc78e93 | 2020-11-19 17:06:24 -0700 | [diff] [blame] | 955 | const char *caller, const DrawDispatchVuid &vuids) const { |
Jeremy Gebben | 37ed31e | 2021-04-23 12:12:10 -0600 | [diff] [blame] | 956 | layer_data::optional<layer_data::unordered_map<VkImageView, VkImageLayout>> checked_layouts; |
Karl Schultz | 2171f93 | 2021-03-19 10:47:01 -0600 | [diff] [blame] | 957 | if (descriptor_set->GetTotalDescriptorCount() > cvdescriptorset::PrefilterBindRequestMap::kManyDescriptors_) { |
| 958 | checked_layouts.emplace(); |
| 959 | } |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 960 | bool result = false; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 961 | VkFramebuffer framebuffer = cb_node->activeFramebuffer ? cb_node->activeFramebuffer->framebuffer() : VK_NULL_HANDLE; |
John Zulauf | 79f0658 | 2021-02-27 18:38:39 -0700 | [diff] [blame] | 962 | for (const auto &binding_pair : bindings) { |
| 963 | const auto binding = binding_pair.first; |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 964 | DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(), binding); |
        if (binding_it.AtEnd()) {  // An iterator at end immediately after construction means the binding does not exist in the layout.
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 966 | auto set = descriptor_set->GetSet(); |
locke-lunarg | 1328e8e | 2020-08-20 12:40:08 -0600 | [diff] [blame] | 967 | result |= LogError(set, vuids.descriptor_valid, |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 968 | "%s encountered the following validation error at %s time: Attempting to " |
| 969 | "validate DrawState for binding #%u which is an invalid binding for this descriptor set.", |
| 970 | report_data->FormatHandle(set).c_str(), caller, binding); |
| 971 | return result; |
Tobin Ehlis | 58c5958 | 2016-06-21 12:34:33 -0600 | [diff] [blame] | 972 | } |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 973 | |
| 974 | if (binding_it.GetDescriptorBindingFlags() & |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 975 | (VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT)) { |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 976 | // Can't validate the descriptor because it may not have been updated, |
| 977 | // or the view could have been destroyed |
| 978 | continue; |
| 979 | } |
        // This is a record-time-only path
| 981 | const bool record_time_validate = true; |
locke-lunarg | fc78e93 | 2020-11-19 17:06:24 -0700 | [diff] [blame] | 982 | result |= ValidateDescriptorSetBindingData(cb_node, descriptor_set, dynamic_offsets, binding_pair, framebuffer, attachments, |
Karl Schultz | 2171f93 | 2021-03-19 10:47:01 -0600 | [diff] [blame] | 983 | subpasses, record_time_validate, caller, vuids, checked_layouts); |
unknown | 3087a64 | 2019-09-26 17:21:05 -0600 | [diff] [blame] | 984 | } |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 985 | return result; |
unknown | 3087a64 | 2019-09-26 17:21:05 -0600 | [diff] [blame] | 986 | } |
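// Minimal sketch of how this entry point is used at draw/dispatch record time (caller-side names
// other than ValidateDrawState are assumptions): the caller gathers the bindings the bound pipeline
// actually uses into a BindingReqMap (binding -> DescriptorRequirement, from shader reflection) and
// validates only those bindings.
//
//     BindingReqMap reqs = ...;  // built from the pipeline's shader interface
//     skip |= ValidateDrawState(descriptor_set, reqs, dynamic_offsets, cb_state, &attachment_views,
//                               &subpass_infos, "vkCmdDraw()", vuids);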
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 987 | |
locke-lunarg | b8be822 | 2020-10-20 00:34:37 -0600 | [diff] [blame] | 988 | bool CoreChecks::ValidateDescriptorSetBindingData(const CMD_BUFFER_STATE *cb_node, const DescriptorSet *descriptor_set, |
| 989 | const std::vector<uint32_t> &dynamic_offsets, |
John Zulauf | 79f0658 | 2021-02-27 18:38:39 -0700 | [diff] [blame] | 990 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, |
Mark Lobodzinski | 85ebd40 | 2020-12-03 12:56:07 -0700 | [diff] [blame] | 991 | VkFramebuffer framebuffer, const std::vector<IMAGE_VIEW_STATE *> *attachments, |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 992 | const std::vector<SUBPASS_INFO> *subpasses, bool record_time_validate, |
Karl Schultz | 2171f93 | 2021-03-19 10:47:01 -0600 | [diff] [blame] | 993 | const char *caller, const DrawDispatchVuid &vuids, |
Jeremy Gebben | 37ed31e | 2021-04-23 12:12:10 -0600 | [diff] [blame] | 994 | layer_data::optional<layer_data::unordered_map<VkImageView, VkImageLayout>> &checked_layouts) const { |
unknown | 3087a64 | 2019-09-26 17:21:05 -0600 | [diff] [blame] | 995 | using DescriptorClass = cvdescriptorset::DescriptorClass; |
| 996 | using BufferDescriptor = cvdescriptorset::BufferDescriptor; |
| 997 | using ImageDescriptor = cvdescriptorset::ImageDescriptor; |
| 998 | using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor; |
| 999 | using SamplerDescriptor = cvdescriptorset::SamplerDescriptor; |
| 1000 | using TexelDescriptor = cvdescriptorset::TexelDescriptor; |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 1001 | using AccelerationStructureDescriptor = cvdescriptorset::AccelerationStructureDescriptor; |
locke-lunarg | 3604599 | 2020-08-20 16:54:37 -0600 | [diff] [blame] | 1002 | const auto binding = binding_info.first; |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1003 | bool skip = false; |
unknown | 3087a64 | 2019-09-26 17:21:05 -0600 | [diff] [blame] | 1004 | DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(), binding); |
| 1005 | { |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 1006 | // Copy the range, the end range is subject to update based on variable length descriptor arrays. |
| 1007 | cvdescriptorset::IndexRange index_range = binding_it.GetGlobalIndexRange(); |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 1008 | auto array_idx = 0; // Track array idx if we're dealing with array descriptors |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1009 | |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 1010 | if (binding_it.IsVariableDescriptorCount()) { |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1011 | // Only validate the first N descriptors if it uses variable_count |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 1012 | index_range.end = index_range.start + descriptor_set->GetVariableDescriptorCount(); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1013 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1014 | for (uint32_t i = index_range.start; !skip && i < index_range.end; ++i, ++array_idx) { |
Locke | b994adf | 2019-03-29 23:52:31 -0600 | [diff] [blame] | 1015 | uint32_t index = i - index_range.start; |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 1016 | const auto *descriptor = descriptor_set->GetDescriptorFromGlobalIndex(i); |
Jeremy Gebben | 550ebbd | 2021-03-11 05:04:52 -0700 | [diff] [blame] | 1017 | const auto descriptor_class = descriptor->GetClass(); |
Locke | b994adf | 2019-03-29 23:52:31 -0600 | [diff] [blame] | 1018 | |
Jeremy Gebben | 550ebbd | 2021-03-11 05:04:52 -0700 | [diff] [blame] | 1019 | if (descriptor_class == DescriptorClass::InlineUniform) { |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 1020 | // Can't validate the descriptor because it may not have been updated. |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1021 | continue; |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 1022 | } else if (!descriptor->updated) { |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 1023 | auto set = descriptor_set->GetSet(); |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 1024 | return LogError( |
| 1025 | set, vuids.descriptor_valid, |
| 1026 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in binding #%" PRIu32 |
| 1027 | " index %" PRIu32 |
| 1028 | " is being used in draw but has never been updated via vkUpdateDescriptorSets() or a similar call.", |
| 1029 | report_data->FormatHandle(set).c_str(), caller, binding, index); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1030 | } |
| 1031 | switch (descriptor_class) { |
| 1032 | case DescriptorClass::GeneralBuffer: { |
| 1033 | const auto *buffer_desc = static_cast<const BufferDescriptor *>(descriptor); |
| 1034 | skip = |
| 1035 | ValidateGeneralBufferDescriptor(caller, vuids, cb_node, descriptor_set, *buffer_desc, binding_info, index); |
| 1036 | } break; |
| 1037 | case DescriptorClass::ImageSampler: { |
| 1038 | const auto *image_sampler_desc = static_cast<const ImageSamplerDescriptor *>(descriptor); |
| 1039 | skip = ValidateImageDescriptor(caller, vuids, cb_node, descriptor_set, *image_sampler_desc, binding_info, index, |
| 1040 | record_time_validate, attachments, subpasses, framebuffer, binding_it.GetType(), |
| 1041 | checked_layouts); |
| 1042 | if (!skip) { |
| 1043 | skip = ValidateSamplerDescriptor(caller, vuids, cb_node, descriptor_set, binding_info, index, |
| 1044 | image_sampler_desc->GetSampler(), image_sampler_desc->IsImmutableSampler(), |
| 1045 | image_sampler_desc->GetSamplerState()); |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 1046 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1047 | } break; |
| 1048 | case DescriptorClass::Image: { |
| 1049 | const auto *image_desc = static_cast<const ImageDescriptor *>(descriptor); |
| 1050 | skip = ValidateImageDescriptor(caller, vuids, cb_node, descriptor_set, *image_desc, binding_info, index, |
| 1051 | record_time_validate, attachments, subpasses, framebuffer, binding_it.GetType(), |
| 1052 | checked_layouts); |
| 1053 | } break; |
| 1054 | case DescriptorClass::PlainSampler: { |
| 1055 | const auto *sampler_desc = static_cast<const SamplerDescriptor *>(descriptor); |
| 1056 | skip = ValidateSamplerDescriptor(caller, vuids, cb_node, descriptor_set, binding_info, index, |
| 1057 | sampler_desc->GetSampler(), sampler_desc->IsImmutableSampler(), |
| 1058 | sampler_desc->GetSamplerState()); |
| 1059 | } break; |
| 1060 | case DescriptorClass::TexelBuffer: { |
| 1061 | const auto *texel_desc = static_cast<const TexelDescriptor *>(descriptor); |
| 1062 | skip = ValidateTexelDescriptor(caller, vuids, cb_node, descriptor_set, *texel_desc, binding_info, index); |
| 1063 | } break; |
| 1064 | case DescriptorClass::AccelerationStructure: { |
| 1065 | const auto *accel_desc = static_cast<const AccelerationStructureDescriptor *>(descriptor); |
| 1066 | skip = ValidateAccelerationDescriptor(caller, vuids, cb_node, descriptor_set, *accel_desc, binding_info, index); |
| 1067 | } break; |
| 1068 | default: |
| 1069 | break; |
| 1070 | } |
| 1071 | } |
| 1072 | } |
| 1073 | return skip; |
| 1074 | } |
locke-lunarg | 3604599 | 2020-08-20 16:54:37 -0600 | [diff] [blame] | 1075 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1076 | bool CoreChecks::ValidateGeneralBufferDescriptor(const char *caller, const DrawDispatchVuid &vuids, const CMD_BUFFER_STATE *cb_node, |
| 1077 | const cvdescriptorset::DescriptorSet *descriptor_set, |
| 1078 | const cvdescriptorset::BufferDescriptor &descriptor, |
| 1079 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, |
| 1080 | uint32_t index) const { |
| 1081 | // Verify that buffers are valid |
| 1082 | auto buffer = descriptor.GetBuffer(); |
| 1083 | auto buffer_node = descriptor.GetBufferState(); |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1084 | if ((!buffer_node && !enabled_features.robustness2_features.nullDescriptor) || (buffer_node && buffer_node->Destroyed())) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1085 | auto set = descriptor_set->GetSet(); |
| 1086 | return LogError(set, vuids.descriptor_valid, |
| 1087 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1088 | "binding #%" PRIu32 " index %" PRIu32 " is using buffer %s that is invalid or has been destroyed.", |
| 1089 | report_data->FormatHandle(set).c_str(), caller, binding_info.first, index, |
| 1090 | report_data->FormatHandle(buffer).c_str()); |
| 1091 | } |
| 1092 | if (buffer) { |
| 1093 | if (buffer_node && !buffer_node->sparse) { |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1094 | for (const auto &item: buffer_node->GetBoundMemory()) { |
| 1095 | auto &binding = item.second; |
| 1096 | if (binding.mem_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1097 | auto set = descriptor_set->GetSet(); |
| 1098 | return LogError(set, vuids.descriptor_valid, |
| 1099 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
                                "binding #%" PRIu32 " index %" PRIu32 " uses buffer %s that references invalid memory %s.",
| 1101 | report_data->FormatHandle(set).c_str(), caller, binding_info.first, index, |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1102 | report_data->FormatHandle(buffer).c_str(), |
| 1103 | report_data->FormatHandle(binding.mem_state->mem()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1104 | } |
| 1105 | } |
| 1106 | } |
| 1107 | if (enabled_features.core11.protectedMemory == VK_TRUE) { |
| 1108 | if (ValidateProtectedBuffer(cb_node, buffer_node, caller, vuids.unprotected_command_buffer, |
| 1109 | "Buffer is in a descriptorSet")) { |
| 1110 | return true; |
| 1111 | } |
| 1112 | if (binding_info.second.is_writable && |
| 1113 | ValidateUnprotectedBuffer(cb_node, buffer_node, caller, vuids.protected_command_buffer, |
| 1114 | "Buffer is in a descriptorSet")) { |
| 1115 | return true; |
| 1116 | } |
| 1117 | } |
| 1118 | } |
| 1119 | return false; |
| 1120 | } |
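// The "references invalid memory" case above typically comes from freeing the memory that backs a
// buffer while a descriptor set still points at that buffer, e.g. (illustrative application-side
// sequence):
//
//     vkFreeMemory(device, buffer_memory, nullptr);   // backing memory destroyed
//     // ... a later draw that uses the descriptor set referencing the buffer triggers the error above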
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 1121 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1122 | bool CoreChecks::ValidateImageDescriptor(const char *caller, const DrawDispatchVuid &vuids, const CMD_BUFFER_STATE *cb_node, |
| 1123 | const cvdescriptorset::DescriptorSet *descriptor_set, |
| 1124 | const cvdescriptorset::ImageDescriptor &image_descriptor, |
| 1125 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, uint32_t index, |
| 1126 | bool record_time_validate, const std::vector<IMAGE_VIEW_STATE *> *attachments, |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 1127 | const std::vector<SUBPASS_INFO> *subpasses, VkFramebuffer framebuffer, |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1128 | VkDescriptorType descriptor_type, |
Jeremy Gebben | 37ed31e | 2021-04-23 12:12:10 -0600 | [diff] [blame] | 1129 | layer_data::optional<layer_data::unordered_map<VkImageView, VkImageLayout>> &checked_layouts) const { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1130 | std::vector<const SAMPLER_STATE *> sampler_states; |
| 1131 | VkImageView image_view = image_descriptor.GetImageView(); |
| 1132 | const IMAGE_VIEW_STATE *image_view_state = image_descriptor.GetImageViewState(); |
| 1133 | VkImageLayout image_layout = image_descriptor.GetImageLayout(); |
| 1134 | const auto binding = binding_info.first; |
| 1135 | const auto reqs = binding_info.second.reqs; |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 1136 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1137 | if (image_descriptor.GetClass() == cvdescriptorset::DescriptorClass::ImageSampler) { |
| 1138 | sampler_states.emplace_back( |
| 1139 | static_cast<const cvdescriptorset::ImageSamplerDescriptor &>(image_descriptor).GetSamplerState()); |
| 1140 | } else { |
| 1141 | if (binding_info.second.samplers_used_by_image.size() > index) { |
Jeremy Gebben | 856b8c6 | 2021-12-01 15:20:07 -0700 | [diff] [blame] | 1142 | for (const auto &desc_index : binding_info.second.samplers_used_by_image[index]) { |
| 1143 | const auto *desc = descriptor_set->GetDescriptorFromBinding(desc_index.sampler_slot.binding, desc_index.sampler_index); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1144 | // NOTE: This check _shouldn't_ be necessary due to the checks made in IsSpecificDescriptorType in |
| 1145 | // shader_validation.cpp. However, without this check some traces still crash. |
Jeremy Gebben | 856b8c6 | 2021-12-01 15:20:07 -0700 | [diff] [blame] | 1146 | if (desc && (desc->GetClass() == cvdescriptorset::DescriptorClass::PlainSampler)) { |
| 1147 | const auto *sampler_state = static_cast<const cvdescriptorset::SamplerDescriptor *>(desc)->GetSamplerState(); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1148 | if (sampler_state) sampler_states.emplace_back(sampler_state); |
| 1149 | } |
| 1150 | } |
| 1151 | } |
| 1152 | } |
locke-lunarg | 4e1e463 | 2020-10-26 01:52:19 -0600 | [diff] [blame] | 1153 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1154 | if ((!image_view_state && !enabled_features.robustness2_features.nullDescriptor) || |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1155 | (image_view_state && image_view_state->Destroyed())) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1156 | // Image view must have been destroyed since initial update. Could potentially flag the descriptor |
| 1157 | // as "invalid" (updated = false) at DestroyImageView() time and detect this error at bind time |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 1158 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1159 | auto set = descriptor_set->GetSet(); |
| 1160 | return LogError(set, vuids.descriptor_valid, |
| 1161 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1162 | "binding #%" PRIu32 " index %" PRIu32 " is using imageView %s that is invalid or has been destroyed.", |
| 1163 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1164 | report_data->FormatHandle(image_view).c_str()); |
| 1165 | } |
| 1166 | if (image_view) { |
| 1167 | const auto &image_view_ci = image_view_state->create_info; |
| 1168 | const auto *image_state = image_view_state->image_state.get(); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 1169 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1170 | if (reqs & DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS) { |
| 1171 | if (~reqs & (1 << image_view_ci.viewType)) { |
| 1172 | auto set = descriptor_set->GetSet(); |
| 1173 | return LogError(set, vuids.descriptor_valid, |
| 1174 | "Descriptor set %s encountered the following validation error at %s time: Descriptor " |
| 1175 | "in binding #%" PRIu32 " index %" PRIu32 " requires an image view of type %s but got %s.", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 1176 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1177 | StringDescriptorReqViewType(reqs).c_str(), string_VkImageViewType(image_view_ci.viewType)); |
| 1178 | } |
locke-lunarg | 25b6c35 | 2020-08-06 17:44:18 -0600 | [diff] [blame] | 1179 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1180 | if (!(reqs & image_view_state->descriptor_format_bits)) { |
| 1181 | // bad component type |
| 1182 | auto set = descriptor_set->GetSet(); |
| 1183 | return LogError(set, vuids.descriptor_valid, |
| 1184 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1185 | "Descriptor in binding " |
| 1186 | "#%" PRIu32 " index %" PRIu32 " requires %s component type, but bound descriptor format is %s.", |
| 1187 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1188 | StringDescriptorReqComponentType(reqs), string_VkFormat(image_view_ci.format)); |
| 1189 | } |
| 1190 | } |
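        // Illustrative mismatch caught by the view-type check above: a GLSL declaration such as
        //     layout(set = 0, binding = 0) uniform texture2D tex;
        // requires a 2D-compatible image view, so binding a view created with VK_IMAGE_VIEW_TYPE_3D
        // at that slot produces the error.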
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 1191 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1192 | // NOTE: Submit time validation of UPDATE_AFTER_BIND image layout is not possible with the |
        // image layout tracking as currently implemented, so only record-time validation is done
| 1194 | if (!disabled[image_layout_validation] && record_time_validate) { |
| 1195 | // Verify Image Layout |
| 1196 | // No "invalid layout" VUID required for this call, since the optimal_layout parameter is UNDEFINED. |
| 1197 | // The caller provides a checked_layouts map when there are a large number of layouts to check, |
| 1198 | // making it worthwhile to keep track of verified layouts and not recheck them. |
| 1199 | bool already_validated = false; |
| 1200 | if (checked_layouts) { |
| 1201 | auto search = checked_layouts->find(image_view); |
| 1202 | if (search != checked_layouts->end() && search->second == image_layout) { |
| 1203 | already_validated = true; |
| 1204 | } |
| 1205 | } |
| 1206 | if (!already_validated) { |
| 1207 | bool hit_error = false; |
| 1208 | VerifyImageLayout(cb_node, image_state, image_view_state->normalized_subresource_range, |
| 1209 | image_view_ci.subresourceRange.aspectMask, image_layout, VK_IMAGE_LAYOUT_UNDEFINED, caller, |
| 1210 | kVUIDUndefined, "VUID-VkDescriptorImageInfo-imageLayout-00344", &hit_error); |
| 1211 | if (hit_error) { |
| 1212 | auto set = descriptor_set->GetSet(); |
| 1213 | return LogError(set, vuids.descriptor_valid, |
| 1214 | "Descriptor set %s encountered the following validation error at %s time: Image layout " |
| 1215 | "specified " |
| 1216 | "at vkUpdateDescriptorSet* or vkCmdPushDescriptorSet* time " |
                                    "doesn't match the actual image layout at the time the descriptor is used. See the previous error callback for "
| 1218 | "specific details.", |
| 1219 | report_data->FormatHandle(set).c_str(), caller); |
| 1220 | } |
| 1221 | if (checked_layouts) { |
| 1222 | checked_layouts->emplace(image_view, image_layout); |
| 1223 | } |
| 1224 | } |
| 1225 | } |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 1226 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1227 | // Verify Sample counts |
| 1228 | if ((reqs & DESCRIPTOR_REQ_SINGLE_SAMPLE) && image_view_state->samples != VK_SAMPLE_COUNT_1_BIT) { |
| 1229 | auto set = descriptor_set->GetSet(); |
| 1230 | return LogError(set, vuids.descriptor_valid, |
| 1231 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1232 | "binding #%" PRIu32 " index %" PRIu32 " requires bound image to have VK_SAMPLE_COUNT_1_BIT but got %s.", |
| 1233 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1234 | string_VkSampleCountFlagBits(image_view_state->samples)); |
| 1235 | } |
| 1236 | if ((reqs & DESCRIPTOR_REQ_MULTI_SAMPLE) && image_view_state->samples == VK_SAMPLE_COUNT_1_BIT) { |
| 1237 | auto set = descriptor_set->GetSet(); |
| 1238 | return LogError(set, vuids.descriptor_valid, |
| 1239 | "Descriptor set %s encountered the following validation error at %s time: Descriptor " |
| 1240 | "in binding #%" PRIu32 " index %" PRIu32 |
| 1241 | " requires bound image to have multiple samples, but got VK_SAMPLE_COUNT_1_BIT.", |
| 1242 | report_data->FormatHandle(set).c_str(), caller, binding, index); |
| 1243 | } |
locke-lunarg | 3604599 | 2020-08-20 16:54:37 -0600 | [diff] [blame] | 1244 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1245 | // Verify VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT |
| 1246 | if ((reqs & DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION) && (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) && |
| 1247 | !(image_view_state->format_features & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT)) { |
| 1248 | auto set = descriptor_set->GetSet(); |
| 1249 | LogObjectList objlist(set); |
| 1250 | objlist.add(image_view); |
| 1251 | return LogError(objlist, vuids.imageview_atomic, |
| 1252 | "Descriptor set %s encountered the following validation error at %s time: Descriptor " |
| 1253 | "in binding #%" PRIu32 " index %" PRIu32 |
| 1254 | ", %s, format %s, doesn't " |
| 1255 | "contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT.", |
| 1256 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1257 | report_data->FormatHandle(image_view).c_str(), string_VkFormat(image_view_ci.format)); |
| 1258 | } |
locke-lunarg | 654a905 | 2020-10-13 16:28:42 -0600 | [diff] [blame] | 1259 | |
        // When VK_KHR_format_feature_flags2 is supported, storage image read/write-without-format
        // support is reported per format rather than as a blanket physical-device feature.
| 1263 | if (has_format_feature2) { |
| 1264 | const VkFormatFeatureFlags2KHR img_format_feats = image_view_state->image_state->format_features; |
| 1265 | |
| 1266 | if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) { |
| 1267 | if ((reqs & DESCRIPTOR_REQ_IMAGE_READ_WITHOUT_FORMAT) && |
| 1268 | !(img_format_feats & VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR)) { |
| 1269 | auto set = descriptor_set->GetSet(); |
| 1270 | LogObjectList objlist(set); |
| 1271 | objlist.add(image_view); |
| 1272 | return LogError(objlist, vuids.storage_image_read_without_format, |
| 1273 | "Descriptor set %s encountered the following validation error at %s time: Descriptor " |
| 1274 | "in binding #%" PRIu32 " index %" PRIu32 |
| 1275 | ", %s, format %s, doesn't " |
| 1276 | "contain VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR", |
| 1277 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1278 | report_data->FormatHandle(image_view).c_str(), string_VkFormat(image_view_ci.format)); |
| 1279 | } |
| 1280 | |
| 1281 | if ((reqs & DESCRIPTOR_REQ_IMAGE_WRITE_WITHOUT_FORMAT) && |
| 1282 | !(img_format_feats & VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR)) { |
| 1283 | auto set = descriptor_set->GetSet(); |
| 1284 | LogObjectList objlist(set); |
| 1285 | objlist.add(image_view); |
| 1286 | return LogError(objlist, vuids.storage_image_write_without_format, |
| 1287 | "Descriptor set %s encountered the following validation error at %s time: Descriptor " |
| 1288 | "in binding #%" PRIu32 " index %" PRIu32 |
| 1289 | ", %s, format %s, doesn't " |
| 1290 | "contain VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR", |
| 1291 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1292 | report_data->FormatHandle(image_view).c_str(), string_VkFormat(image_view_ci.format)); |
| 1293 | } |
| 1294 | } |
Lionel Landwerlin | cdbe868 | 2021-12-08 15:10:37 +0200 | [diff] [blame] | 1295 | |
| 1296 | if ((reqs & DESCRIPTOR_REQ_IMAGE_DREF) && |
| 1297 | !(img_format_feats & VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR)) { |
| 1298 | auto set = descriptor_set->GetSet(); |
| 1299 | LogObjectList objlist(set); |
| 1300 | objlist.add(image_view); |
| 1301 | return LogError(objlist, vuids.depth_compare_sample, |
| 1302 | "Descriptor set %s encountered the following validation error at %s time: Descriptor " |
| 1303 | "in binding #%" PRIu32 " index %" PRIu32 |
| 1304 | ", %s, format %s, doesn't " |
| 1305 | "contain VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR", |
| 1306 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1307 | report_data->FormatHandle(image_view).c_str(), string_VkFormat(image_view_ci.format)); |
| 1308 | } |
Lionel Landwerlin | 6a9f89c | 2021-12-07 15:46:46 +0200 | [diff] [blame] | 1309 | } |
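        // Illustrative sketch of where the per-format feature bits used above come from when
        // VK_KHR_format_feature_flags2 is available (a query performed outside this function; local
        // names in this sketch are assumptions):
        //
        //     VkFormatProperties3KHR props3 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR};
        //     VkFormatProperties2 props2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, &props3};
        //     vkGetPhysicalDeviceFormatProperties2(physical_device, image_view_ci.format, &props2);
        //     const bool read_without_format_ok =
        //         (props3.optimalTilingFeatures & VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR) != 0;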
| 1310 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1311 | // Verify if attachments are used in DescriptorSet |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 1312 | if (attachments && attachments->size() > 0 && subpasses && (descriptor_type != VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)) { |
            const bool ds_aspect = (image_view_state->normalized_subresource_range.aspectMask &
                                    (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != 0;
            // Iterate by index so att_index stays aligned with both *attachments and *subpasses, even
            // when an attachment is skipped below.
            for (uint32_t att_index = 0; att_index < attachments->size(); ++att_index) {
                auto *view_state = (*attachments)[att_index];
                const SUBPASS_INFO &subpass = (*subpasses)[att_index];
                if (!subpass.used || !view_state || view_state->Destroyed()) {
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1321 | continue; |
| 1322 | } |
Ricardo Garcia | fe2b768 | 2021-07-30 10:44:57 +0200 | [diff] [blame] | 1323 | if (ds_aspect && (subpass.usage == VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT || |
| 1324 | subpass.usage == VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1325 | if ((image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL || |
| 1326 | image_layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL || |
| 1327 | image_layout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL) && |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 1328 | (subpass.layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL || |
| 1329 | subpass.layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL || |
| 1330 | subpass.layout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL)) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1331 | continue; |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 1332 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1333 | if ((image_layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL && |
Niels Möller | 1f945f6 | 2021-07-12 15:06:02 +0200 | [diff] [blame] | 1334 | subpass.layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL) || |
| 1335 | (subpass.layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL && |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1336 | image_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL)) { |
| 1337 | continue; |
| 1338 | } |
| 1339 | } |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1340 | if (view_state->image_view() == image_view) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1341 | auto set = descriptor_set->GetSet(); |
| 1342 | LogObjectList objlist(set); |
| 1343 | objlist.add(image_view); |
| 1344 | objlist.add(framebuffer); |
| 1345 | return LogError(objlist, vuids.image_subresources, |
| 1346 | "Descriptor set %s encountered the following validation error at %s time: %s is used in " |
| 1347 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 " and %s attachment # %" PRIu32 ".", |
| 1348 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(image_view).c_str(), |
| 1349 | binding, index, report_data->FormatHandle(framebuffer).c_str(), att_index); |
| 1350 | } else { |
| 1351 | if (image_view_state->OverlapSubresource(*view_state)) { |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 1352 | auto set = descriptor_set->GetSet(); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1353 | LogObjectList objlist(set); |
| 1354 | objlist.add(image_view); |
| 1355 | objlist.add(framebuffer); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1356 | objlist.add(view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1357 | return LogError( |
| 1358 | objlist, vuids.image_subresources, |
| 1359 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1360 | "Image subresources of %s in " |
| 1361 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 " and %s in %s attachment # %" PRIu32 " overlap.", |
| 1362 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(image_view).c_str(), binding, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1363 | index, report_data->FormatHandle(view_state->image_view()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1364 | report_data->FormatHandle(framebuffer).c_str(), att_index); |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 1365 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1366 | } |
| 1368 | } |
| 1369 | if (enabled_features.core11.protectedMemory == VK_TRUE) { |
| 1370 | if (ValidateProtectedImage(cb_node, image_view_state->image_state.get(), caller, vuids.unprotected_command_buffer, |
| 1371 | "Image is in a descriptorSet")) { |
| 1372 | return true; |
| 1373 | } |
| 1374 | if (binding_info.second.is_writable && |
| 1375 | ValidateUnprotectedImage(cb_node, image_view_state->image_state.get(), caller, vuids.protected_command_buffer, |
| 1376 | "Image is in a descriptorSet")) { |
| 1377 | return true; |
| 1378 | } |
| 1379 | } |
| 1380 | } |
Chris Forbes | e92dd1d | 2019-01-21 15:58:57 -0800 | [diff] [blame] | 1381 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1382 | for (const auto *sampler_state : sampler_states) { |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1383 | if (!sampler_state || sampler_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1384 | continue; |
| 1385 | } |
locke-lunarg | 25b6c35 | 2020-08-06 17:44:18 -0600 | [diff] [blame] | 1386 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1387 | // TODO: Validate 04015 for DescriptorClass::PlainSampler |
| 1388 | if ((sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT || |
| 1389 | sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) && |
| 1390 | (sampler_state->customCreateInfo.format == VK_FORMAT_UNDEFINED)) { |
| 1391 | if (image_view_state->create_info.format == VK_FORMAT_B4G4R4A4_UNORM_PACK16 || |
| 1392 | image_view_state->create_info.format == VK_FORMAT_B5G6R5_UNORM_PACK16 || |
| 1393 | image_view_state->create_info.format == VK_FORMAT_B5G5R5A1_UNORM_PACK16) { |
| 1394 | auto set = descriptor_set->GetSet(); |
| 1395 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1396 | objlist.add(sampler_state->sampler()); |
| 1397 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1398 | return LogError(objlist, "VUID-VkSamplerCustomBorderColorCreateInfoEXT-format-04015", |
| 1399 | "Descriptor set %s encountered the following validation error at %s time: Sampler %s in " |
| 1400 | "binding #%" PRIu32 " index %" PRIu32 |
| 1401 | " has a custom border color with format = VK_FORMAT_UNDEFINED and is used to " |
| 1402 | "sample an image view %s with format %s", |
| 1403 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1404 | report_data->FormatHandle(sampler_state->sampler()).c_str(), binding, index, |
| 1405 | report_data->FormatHandle(image_view_state->image_view()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1406 | string_VkFormat(image_view_state->create_info.format)); |
| 1407 | } |
| 1408 | } |
| 1409 | VkFilter sampler_mag_filter = sampler_state->createInfo.magFilter; |
| 1410 | VkFilter sampler_min_filter = sampler_state->createInfo.minFilter; |
| 1411 | VkBool32 sampler_compare_enable = sampler_state->createInfo.compareEnable; |
| 1412 | if ((sampler_mag_filter == VK_FILTER_LINEAR || sampler_min_filter == VK_FILTER_LINEAR) && |
| 1413 | (sampler_compare_enable == VK_FALSE) && |
| 1414 | !(image_view_state->format_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT)) { |
| 1415 | auto set = descriptor_set->GetSet(); |
| 1416 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1417 | objlist.add(sampler_state->sampler()); |
| 1418 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1419 | return LogError(objlist, vuids.linear_sampler, |
| 1420 | "Descriptor set %s encountered the following validation error at %s time: Sampler " |
| 1421 | "(%s) is set to use VK_FILTER_LINEAR with " |
Tony-LunarG | 81195df | 2021-12-02 15:01:58 -0700 | [diff] [blame] | 1422 | "compareEnable set to VK_FALSE, but image view's (%s) format (%s) does not " |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1423 | "contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT in its format features.", |
| 1424 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1425 | report_data->FormatHandle(sampler_state->sampler()).c_str(), |
| 1426 | report_data->FormatHandle(image_view_state->image_view()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1427 | string_VkFormat(image_view_state->create_info.format)); |
| 1428 | } |
| 1429 | if (sampler_mag_filter == VK_FILTER_CUBIC_EXT || sampler_min_filter == VK_FILTER_CUBIC_EXT) { |
| 1430 | if (!(image_view_state->format_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT)) { |
| 1431 | auto set = descriptor_set->GetSet(); |
| 1432 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1433 | objlist.add(sampler_state->sampler()); |
| 1434 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1435 | return LogError(objlist, vuids.cubic_sampler, |
| 1436 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1437 | "Sampler (%s) is set to use VK_FILTER_CUBIC_EXT, then " |
| 1438 | "image view's (%s) format (%s) MUST contain " |
| 1439 | "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT in its format features.", |
| 1440 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1441 | report_data->FormatHandle(sampler_state->sampler()).c_str(), |
| 1442 | report_data->FormatHandle(image_view_state->image_view()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1443 | string_VkFormat(image_view_state->create_info.format)); |
| 1444 | } |
| 1445 | |
| 1446 | if (IsExtEnabled(device_extensions.vk_ext_filter_cubic)) { |
| 1447 | const auto reduction_mode_info = |
| 1448 | LvlFindInChain<VkSamplerReductionModeCreateInfo>(sampler_state->createInfo.pNext); |
| 1449 | if (reduction_mode_info && |
| 1450 | (reduction_mode_info->reductionMode == VK_SAMPLER_REDUCTION_MODE_MIN || |
| 1451 | reduction_mode_info->reductionMode == VK_SAMPLER_REDUCTION_MODE_MAX) && |
| 1452 | !image_view_state->filter_cubic_props.filterCubicMinmax) { |
| 1453 | auto set = descriptor_set->GetSet(); |
| 1454 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1455 | objlist.add(sampler_state->sampler()); |
| 1456 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1457 | return LogError(objlist, vuids.filter_cubic_min_max, |
| 1458 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1459 | "Sampler (%s) is set to use VK_FILTER_CUBIC_EXT & %s, " |
| 1460 | "but image view (%s) doesn't support filterCubicMinmax.", |
| 1461 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1462 | report_data->FormatHandle(sampler_state->sampler()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1463 | string_VkSamplerReductionMode(reduction_mode_info->reductionMode), |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1464 | report_data->FormatHandle(image_view_state->image_view()).c_str()); |
Chris Forbes | e92dd1d | 2019-01-21 15:58:57 -0800 | [diff] [blame] | 1465 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1466 | |
| 1467 | if (!image_view_state->filter_cubic_props.filterCubic) { |
| 1468 | auto set = descriptor_set->GetSet(); |
| 1469 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1470 | objlist.add(sampler_state->sampler()); |
| 1471 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1472 | return LogError(objlist, vuids.filter_cubic, |
| 1473 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1474 | "Sampler (%s) is set to use VK_FILTER_CUBIC_EXT, " |
| 1475 | "but image view (%s) doesn't support filterCubic.", |
| 1476 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1477 | report_data->FormatHandle(sampler_state->sampler()).c_str(), |
| 1478 | report_data->FormatHandle(image_view_state->image_view()).c_str()); |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 1479 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1480 | } |
locke-lunarg | 4e1e463 | 2020-10-26 01:52:19 -0600 | [diff] [blame] | 1481 | |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1482 | if (IsExtEnabled(device_extensions.vk_img_filter_cubic)) { |
Clemens Kern | 5a42ea6 | 2021-09-29 16:30:23 +0200 | [diff] [blame] | 1483 | if (image_view_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_3D || |
| 1484 | image_view_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_CUBE || |
| 1485 | image_view_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) { |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 1486 | auto set = descriptor_set->GetSet(); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1487 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1488 | objlist.add(sampler_state->sampler()); |
| 1489 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1490 | return LogError(objlist, vuids.img_filter_cubic, |
| 1491 | "Descriptor set %s encountered the following validation error at %s time: Sampler " |
| 1492 | "(%s)is set to use VK_FILTER_CUBIC_EXT while the VK_IMG_filter_cubic extension " |
| 1493 | "is enabled, but image view (%s) has an invalid imageViewType (%s).", |
| 1494 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1495 | report_data->FormatHandle(sampler_state->sampler()).c_str(), |
| 1496 | report_data->FormatHandle(image_view_state->image_view()).c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1497 | string_VkImageViewType(image_view_state->create_info.viewType)); |
Tobin Ehlis | b1a2e4b | 2018-03-16 07:54:24 -0600 | [diff] [blame] | 1498 | } |
| 1499 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1500 | } |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1501 | |
| 1502 | if ((image_state->createInfo.flags & VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV) && |
| 1503 | (sampler_state->createInfo.addressModeU != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE || |
| 1504 | sampler_state->createInfo.addressModeV != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE || |
| 1505 | sampler_state->createInfo.addressModeW != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE)) { |
| 1506 | std::string address_mode_letter = |
| 1507 | (sampler_state->createInfo.addressModeU != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE) |
| 1508 | ? "U" |
| 1509 | : (sampler_state->createInfo.addressModeV != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE) ? "V" : "W"; |
| 1510 | VkSamplerAddressMode address_mode = |
| 1511 | (sampler_state->createInfo.addressModeU != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE) |
| 1512 | ? sampler_state->createInfo.addressModeU |
| 1513 | : (sampler_state->createInfo.addressModeV != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE) |
| 1514 | ? sampler_state->createInfo.addressModeV |
| 1515 | : sampler_state->createInfo.addressModeW; |
| 1516 | auto set = descriptor_set->GetSet(); |
| 1517 | LogObjectList objlist(set); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1518 | objlist.add(sampler_state->sampler()); |
| 1519 | objlist.add(image_state->image()); |
| 1520 | objlist.add(image_view_state->image_view()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1521 | return LogError(objlist, vuids.corner_sampled_address_mode, |
| 1522 | "Descriptor set %s encountered the following validation error at %s time: Image " |
| 1523 | "(%s) in image view (%s) is created with flag " |
| 1524 | "VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV and can only be sampled using " |
| 1525 | "VK_SAMPLER_ADDRESS_MODE_CLAMP_EDGE, but sampler (%s) has " |
| 1526 | "createInfo.addressMode%s set to %s.", |
| 1527 | report_data->FormatHandle(set).c_str(), caller, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1528 | report_data->FormatHandle(image_state->image()).c_str(), |
| 1529 | report_data->FormatHandle(image_view_state->image_view()).c_str(), |
| 1530 | report_data->FormatHandle(sampler_state->sampler()).c_str(), address_mode_letter.c_str(), |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1531 | string_VkSamplerAddressMode(address_mode)); |
| 1532 | } |
| 1533 | |
| 1534 | // UnnormalizedCoordinates sampler validations |
| 1535 | if (sampler_state->createInfo.unnormalizedCoordinates) { |
| 1536 | // If the ImageView is used by an unnormalizedCoordinates sampler, its view type must be checked |
| 1537 | if (image_view_ci.viewType == VK_IMAGE_VIEW_TYPE_3D || image_view_ci.viewType == VK_IMAGE_VIEW_TYPE_CUBE || |
| 1538 | image_view_ci.viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY || |
| 1539 | image_view_ci.viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY || |
| 1540 | image_view_ci.viewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) { |
| 1541 | auto set = descriptor_set->GetSet(); |
| 1542 | LogObjectList objlist(set); |
| 1543 | objlist.add(image_view); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1544 | objlist.add(sampler_state->sampler()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1545 | return LogError(objlist, vuids.sampler_imageview_type, |
| 1546 | "Descriptor set %s encountered the following validation error at %s time: %s, type: %s in " |
| 1547 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 "is used by %s.", |
| 1548 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(image_view).c_str(), |
| 1549 | string_VkImageViewType(image_view_ci.viewType), binding, index, |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1550 | report_data->FormatHandle(sampler_state->sampler()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1551 | } |
| 1552 | |
| 1553 | // sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* |
| 1554 | // instructions with ImplicitLod, Dref or Proj in their name |
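| | // e.g. (illustrative, not exhaustive): OpImageSampleImplicitLod, OpImageSampleDrefExplicitLod, |
| | // OpImageSampleProjImplicitLod, and their OpImageSparseSample* counterparts. |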
| 1555 | if (reqs & DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ) { |
| 1556 | auto set = descriptor_set->GetSet(); |
| 1557 | LogObjectList objlist(set); |
| 1558 | objlist.add(image_view); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1559 | objlist.add(sampler_state->sampler()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1560 | return LogError(objlist, vuids.sampler_implicitLod_dref_proj, |
| 1561 | "Descriptor set %s encountered the following validation error at %s time: %s in " |
| 1562 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 |
| 1563 | " is used by %s that uses invalid operator.", |
| 1564 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(image_view).c_str(), |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1565 | binding, index, report_data->FormatHandle(sampler_state->sampler()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1566 | } |
| 1567 | |
| 1568 | // sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* |
| 1569 | // instructions that include a LOD bias or any offset values |
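| | // e.g. (illustrative): OpImageSampleImplicitLod with a Bias or ConstOffset image operand. |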
| 1570 | if (reqs & DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET) { |
| 1571 | auto set = descriptor_set->GetSet(); |
| 1572 | LogObjectList objlist(set); |
| 1573 | objlist.add(image_view); |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1574 | objlist.add(sampler_state->sampler()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1575 | return LogError(objlist, vuids.sampler_bias_offset, |
| 1576 | "Descriptor set %s encountered the following validation error at %s time: %s in " |
| 1577 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 |
| 1578 | " is used by %s that uses invalid bias or offset operator.", |
| 1579 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(image_view).c_str(), |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 1580 | binding, index, report_data->FormatHandle(sampler_state->sampler()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1581 | } |
| 1582 | } |
| 1583 | } |
| 1584 | } |
| 1585 | return false; |
| 1586 | } |
| 1587 | |
| 1588 | bool CoreChecks::ValidateTexelDescriptor(const char *caller, const DrawDispatchVuid &vuids, const CMD_BUFFER_STATE *cb_node, |
| 1589 | const cvdescriptorset::DescriptorSet *descriptor_set, |
| 1590 | const cvdescriptorset::TexelDescriptor &texel_descriptor, |
| 1591 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, |
| 1592 | uint32_t index) const { |
| 1593 | auto buffer_view = texel_descriptor.GetBufferView(); |
| 1594 | auto buffer_view_state = texel_descriptor.GetBufferViewState(); |
| 1595 | const auto binding = binding_info.first; |
| 1596 | const auto reqs = binding_info.second.reqs; |
| 1597 | if ((!buffer_view_state && !enabled_features.robustness2_features.nullDescriptor) || |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1598 | (buffer_view_state && buffer_view_state->Destroyed())) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1599 | auto set = descriptor_set->GetSet(); |
| 1600 | return LogError(set, vuids.descriptor_valid, |
| 1601 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1602 | "binding #%" PRIu32 " index %" PRIu32 " is using bufferView %s that is invalid or has been destroyed.", |
| 1603 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1604 | report_data->FormatHandle(buffer_view).c_str()); |
| 1605 | } |
| 1606 | if (buffer_view) { |
| 1607 | auto buffer = buffer_view_state->create_info.buffer; |
Jeremy Gebben | 9d0dda3 | 2022-01-28 10:16:56 -0700 | [diff] [blame] | 1608 | const auto *buffer_state = buffer_view_state->buffer_state.get(); |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1609 | if (buffer_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1610 | auto set = descriptor_set->GetSet(); |
| 1611 | return LogError(set, vuids.descriptor_valid, |
| 1612 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1613 | "binding #%" PRIu32 " index %" PRIu32 " is using buffer %s that has been destroyed.", |
| 1614 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1615 | report_data->FormatHandle(buffer).c_str()); |
| 1616 | } |
| 1617 | auto format_bits = DescriptorRequirementsBitsFromFormat(buffer_view_state->create_info.format); |
| 1618 | |
| 1619 | if (!(reqs & format_bits)) { |
| 1620 | // bad component type |
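| | // e.g. (illustrative): a texel buffer declared with an integer sampled type in the shader requires a |
| | // *_SINT or *_UINT buffer view format, while a float sampled type requires a float/normalized format. |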
| 1621 | auto set = descriptor_set->GetSet(); |
| 1622 | return LogError(set, vuids.descriptor_valid, |
| 1623 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1624 | "binding #%" PRIu32 " index %" PRIu32 " requires %s component type, but bound descriptor format is %s.", |
| 1625 | report_data->FormatHandle(set).c_str(), caller, binding, index, StringDescriptorReqComponentType(reqs), |
| 1626 | string_VkFormat(buffer_view_state->create_info.format)); |
| 1627 | } |
| 1628 | |
| 1629 | // Verify VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT |
| 1630 | if ((reqs & DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION) && |
| 1631 | (descriptor_set->GetTypeFromBinding(binding) == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER) && |
| 1632 | !(buffer_view_state->format_features & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT)) { |
| 1633 | auto set = descriptor_set->GetSet(); |
| 1634 | LogObjectList objlist(set); |
| 1635 | objlist.add(buffer_view); |
| 1636 | return LogError(objlist, "UNASSIGNED-None-MismatchAtomicBufferFeature", |
| 1637 | "Descriptor set %s encountered the following validation error at %s time: Descriptor " |
| 1638 | "in binding #%" PRIu32 " index %" PRIu32 |
| 1639 | ", %s, format %s, doesn't " |
| 1640 | "contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT.", |
| 1641 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1642 | report_data->FormatHandle(buffer_view).c_str(), string_VkFormat(buffer_view_state->create_info.format)); |
| 1643 | } |
| 1644 | if (enabled_features.core11.protectedMemory == VK_TRUE) { |
| 1645 | if (ValidateProtectedBuffer(cb_node, buffer_view_state->buffer_state.get(), caller, vuids.unprotected_command_buffer, |
| 1646 | "Buffer is in a descriptorSet")) { |
| 1647 | return true; |
| 1648 | } |
| 1649 | if (binding_info.second.is_writable && |
| 1650 | ValidateUnprotectedBuffer(cb_node, buffer_view_state->buffer_state.get(), caller, vuids.protected_command_buffer, |
| 1651 | "Buffer is in a descriptorSet")) { |
| 1652 | return true; |
| 1653 | } |
| 1654 | } |
| 1655 | } |
| 1656 | return false; |
| 1657 | } |
| 1658 | |
| 1659 | bool CoreChecks::ValidateAccelerationDescriptor(const char *caller, const DrawDispatchVuid &vuids, const CMD_BUFFER_STATE *cb_node, |
| 1660 | const cvdescriptorset::DescriptorSet *descriptor_set, |
| 1661 | const cvdescriptorset::AccelerationStructureDescriptor &descriptor, |
| 1662 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, |
| 1663 | uint32_t index) const { |
| 1664 | // Verify that acceleration structures are valid |
| 1665 | const auto binding = binding_info.first; |
| 1666 | if (descriptor.is_khr()) { |
| 1667 | auto acc = descriptor.GetAccelerationStructure(); |
| 1668 | auto acc_node = descriptor.GetAccelerationStructureStateKHR(); |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1669 | if (!acc_node || acc_node->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1670 | if (acc != VK_NULL_HANDLE || !enabled_features.robustness2_features.nullDescriptor) { |
| 1671 | auto set = descriptor_set->GetSet(); |
| 1672 | return LogError(set, vuids.descriptor_valid, |
| 1673 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1674 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 |
| 1675 | " is using acceleration structure %s that is invalid or has been destroyed.", |
| 1676 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1677 | report_data->FormatHandle(acc).c_str()); |
| 1678 | } |
| 1679 | } else { |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1680 | for (const auto &item : acc_node->GetBoundMemory()) { |
| 1681 | auto &mem_binding = item.second; |
| 1682 | if (mem_binding.mem_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1683 | auto set = descriptor_set->GetSet(); |
| 1684 | return LogError(set, vuids.descriptor_valid, |
| 1685 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1686 | "binding #%" PRIu32 " index %" PRIu32 |
| 1687 | " is using acceleration structure %s that references invalid memory %s.", |
| 1688 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1689 | report_data->FormatHandle(acc).c_str(), |
| 1690 | report_data->FormatHandle(mem_binding.mem_state->mem()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1691 | } |
| 1692 | } |
| 1693 | } |
| 1694 | } else { |
| 1695 | auto acc = descriptor.GetAccelerationStructureNV(); |
| 1696 | auto acc_node = descriptor.GetAccelerationStructureStateNV(); |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1697 | if (!acc_node || acc_node->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1698 | if (acc != VK_NULL_HANDLE || !enabled_features.robustness2_features.nullDescriptor) { |
| 1699 | auto set = descriptor_set->GetSet(); |
| 1700 | return LogError(set, vuids.descriptor_valid, |
| 1701 | "Descriptor set %s encountered the following validation error at %s time: " |
| 1702 | "Descriptor in binding #%" PRIu32 " index %" PRIu32 |
| 1703 | " is using acceleration structure %s that is invalid or has been destroyed.", |
| 1704 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
| 1705 | report_data->FormatHandle(acc).c_str()); |
| 1706 | } |
| 1707 | } else { |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1708 | for (const auto &item : acc_node->GetBoundMemory()) { |
| 1709 | auto &mem_binding = item.second; |
| 1710 | if (mem_binding.mem_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1711 | auto set = descriptor_set->GetSet(); |
| 1712 | return LogError(set, vuids.descriptor_valid, |
| 1713 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1714 | "binding #%" PRIu32 " index %" PRIu32 |
| 1715 | " is using acceleration structure %s that references invalid memory %s.", |
| 1716 | report_data->FormatHandle(set).c_str(), caller, binding, index, |
Jeremy Gebben | 6fbf824 | 2021-06-21 09:14:46 -0600 | [diff] [blame] | 1717 | report_data->FormatHandle(acc).c_str(), |
| 1718 | report_data->FormatHandle(mem_binding.mem_state->mem()).c_str()); |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1719 | } |
| 1720 | } |
| 1721 | } |
| 1722 | } |
| 1723 | return false; |
| 1724 | } |
| 1725 | |
| 1726 | // If a validation check involves both the image and the sampler, |
| 1727 | // leave it in the (descriptor_class == DescriptorClass::ImageSampler || descriptor_class == |
| 1728 | // DescriptorClass::Image) path. This function validates only the sampler. |
| 1729 | bool CoreChecks::ValidateSamplerDescriptor(const char *caller, const DrawDispatchVuid &vuids, const CMD_BUFFER_STATE *cb_node, |
| 1730 | const cvdescriptorset::DescriptorSet *descriptor_set, |
| 1731 | const std::pair<const uint32_t, DescriptorRequirement> &binding_info, uint32_t index, |
| 1732 | VkSampler sampler, bool is_immutable, const SAMPLER_STATE *sampler_state) const { |
| 1733 | // Verify Sampler still valid |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1734 | if (!sampler_state || sampler_state->Destroyed()) { |
Jeremy Gebben | e151b6b | 2021-04-09 18:03:06 -0600 | [diff] [blame] | 1735 | auto set = descriptor_set->GetSet(); |
| 1736 | return LogError(set, vuids.descriptor_valid, |
| 1737 | "Descriptor set %s encountered the following validation error at %s time: Descriptor in " |
| 1738 | "binding #%" PRIu32 " index %" PRIu32 " is using sampler %s that is invalid or has been destroyed.", |
| 1739 | report_data->FormatHandle(set).c_str(), caller, binding_info.first, index, |
| 1740 | report_data->FormatHandle(sampler).c_str()); |
| 1741 | } else { |
| 1742 | if (sampler_state->samplerConversion && !is_immutable) { |
| 1743 | auto set = descriptor_set->GetSet(); |
| 1744 | return LogError(set, vuids.descriptor_valid, |
| 1745 | "Descriptor set %s encountered the following validation error at %s time: sampler (%s) " |
| 1746 | "in the descriptor set (%s) contains a YCBCR conversion (%s), then the sampler MUST " |
| 1747 | "also exist as an immutable sampler.", |
| 1748 | report_data->FormatHandle(set).c_str(), caller, report_data->FormatHandle(sampler).c_str(), |
| 1749 | report_data->FormatHandle(descriptor_set->GetSet()).c_str(), |
| 1750 | report_data->FormatHandle(sampler_state->samplerConversion).c_str()); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1751 | } |
| 1752 | } |
Tony-LunarG | ace473a | 2020-05-06 12:48:04 -0600 | [diff] [blame] | 1753 | return false; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1754 | } |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 1755 | |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1756 | // Loop through the write updates to do for a push descriptor set, ignoring dstSet |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 1757 | void cvdescriptorset::DescriptorSet::PerformPushDescriptorsUpdate(ValidationStateTracker *dev_data, uint32_t write_count, |
| 1758 | const VkWriteDescriptorSet *p_wds) { |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1759 | assert(IsPushDescriptor()); |
| 1760 | for (uint32_t i = 0; i < write_count; i++) { |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 1761 | PerformWriteUpdate(dev_data, &p_wds[i]); |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1762 | } |
Jason Macnak | 83cfd58 | 2019-07-31 10:14:24 -0700 | [diff] [blame] | 1763 | |
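| | // Keep deep (safe_VkWriteDescriptorSet) copies of the writes: the caller's array and its pNext chains are |
| | // transient, and these copies are what remains available for any later inspection of the pushed set. |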
| 1764 | push_descriptor_set_writes.clear(); |
| 1765 | push_descriptor_set_writes.reserve(static_cast<std::size_t>(write_count)); |
| 1766 | for (uint32_t i = 0; i < write_count; i++) { |
| 1767 | push_descriptor_set_writes.push_back(safe_VkWriteDescriptorSet(&p_wds[i])); |
| 1768 | } |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1769 | } |
| 1770 | |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1771 | // Perform write update in given update struct |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 1772 | void cvdescriptorset::DescriptorSet::PerformWriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *update) { |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 1773 | // Perform update on a per-binding basis as consecutive updates roll over to next binding |
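| | // For example (illustrative): with binding 0 and binding 1 each holding 4 descriptors, a write with |
| | // dstBinding=0, dstArrayElement=2, descriptorCount=4 updates elements 2..3 of binding 0 and then rolls |
| | // over to elements 0..1 of binding 1, provided the two bindings are consistent. |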
| 1774 | auto descriptors_remaining = update->descriptorCount; |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 1775 | auto offset = update->dstArrayElement; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 1776 | auto orig_binding = DescriptorSetLayout::ConstBindingIterator(layout_.get(), update->dstBinding); |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 1777 | auto current_binding = orig_binding; |
| 1778 | |
Tobin Ehlis | e16805c | 2017-08-09 09:10:37 -0600 | [diff] [blame] | 1779 | uint32_t update_index = 0; |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 1780 | // Verify that each consecutive binding matches the original in type, stage flags & immutable sampler use, and stop at the end of the layout |
| 1781 | while (descriptors_remaining && orig_binding.IsConsistent(current_binding)) { |
| 1782 | const auto &index_range = current_binding.GetGlobalIndexRange(); |
| 1783 | auto global_idx = index_range.start + offset; |
| 1784 | // global_idx is the descriptor to update. If global_idx >= index_range.end, the descriptor isn't in |
| 1785 | // this binding; it may be in the next binding. |
| 1786 | if (global_idx >= index_range.end) { |
| 1787 | offset -= current_binding.GetDescriptorCount(); |
| 1788 | ++current_binding; |
| 1789 | continue; |
| 1790 | } |
| 1791 | |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 1792 | // Loop over the updates for a single binding at a time |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 1793 | uint32_t update_count = std::min(descriptors_remaining, current_binding.GetDescriptorCount() - offset); |
Tobin Ehlis | e16805c | 2017-08-09 09:10:37 -0600 | [diff] [blame] | 1794 | for (uint32_t di = 0; di < update_count; ++di, ++update_index) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 1795 | descriptors_[global_idx + di]->WriteUpdate(this, state_data_, update, update_index); |
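| | // Determine the size of the backing buffer or texel buffer view (if any) so it can be recorded along |
| | // with the descriptor type below. |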
ziga | 7a255fb | 2021-11-20 21:17:07 +0100 | [diff] [blame] | 1796 | VkDeviceSize buffer_size = 0; |
| 1797 | if ((update->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || |
| 1798 | update->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || |
| 1799 | update->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC || |
| 1800 | update->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) && |
| 1801 | update->pBufferInfo) { |
| 1802 | const auto buffer_state = dev_data->GetConstCastShared<BUFFER_STATE>(update->pBufferInfo->buffer); |
| 1803 | if (buffer_state) { |
| 1804 | buffer_size = buffer_state->createInfo.size; |
| 1805 | } |
| 1806 | } else if ((update->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER || |
| 1807 | update->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER) && |
| 1808 | update->pTexelBufferView) { |
| 1809 | const auto buffer_view = dev_data->GetConstCastShared<BUFFER_VIEW_STATE>(update->pTexelBufferView[di]); |
| 1810 | if (buffer_view) { |
| 1811 | buffer_size = buffer_view->buffer_state->createInfo.size; |
| 1812 | } |
| 1813 | } |
| 1814 | descriptors_[global_idx + di]->SetDescriptorType(update->descriptorType, buffer_size); |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 1815 | } |
| 1816 | // Roll over to next binding in case of consecutive update |
| 1817 | descriptors_remaining -= update_count; |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 1818 | if (descriptors_remaining) { |
| 1819 | // The update continues past the current binding. Reset the offset and advance to the next binding; |
| 1820 | // the loop condition (IsConsistent) ensures every binding touched, even one merely skipped over, is |
| 1821 | // consistent with the update and with the original binding. |
| 1822 | offset = 0; |
| 1823 | ++current_binding; |
| 1824 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1825 | } |
Jeff Bolz | dd4cfa1 | 2019-08-11 20:57:51 -0500 | [diff] [blame] | 1826 | if (update->descriptorCount) { |
| 1827 | some_update_ = true; |
| 1828 | change_count_++; |
| 1829 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1830 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 1831 | if (!IsPushDescriptor() && !(layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) & |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1832 | (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) { |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1833 | Invalidate(false); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1834 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1835 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1836 | // Validate Copy update |
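| | // Illustrative sketch of the kind of copy this routine checks (all handles/values below are hypothetical): |
| | //   VkCopyDescriptorSet copy{}; |
| | //   copy.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET; |
| | //   copy.srcSet = src_set_handle;  copy.srcBinding = 0;  copy.srcArrayElement = 0; |
| | //   copy.dstSet = dst_set_handle;  copy.dstBinding = 2;  copy.dstArrayElement = 0; |
| | //   copy.descriptorCount = 4;  // must stay in bounds of both sets and satisfy the type/flag rules below |
| | //   vkUpdateDescriptorSets(device, 0, nullptr, 1, &copy); |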
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1837 | bool CoreChecks::ValidateCopyUpdate(const VkCopyDescriptorSet *update, const DescriptorSet *dst_set, const DescriptorSet *src_set, |
Jeff Bolz | 46c0ea0 | 2019-10-09 13:06:29 -0500 | [diff] [blame] | 1838 | const char *func_name, std::string *error_code, std::string *error_msg) const { |
Jeremy Gebben | 9d0dda3 | 2022-01-28 10:16:56 -0700 | [diff] [blame] | 1839 | const auto *dst_layout = dst_set->GetLayout().get(); |
| 1840 | const auto *src_layout = src_set->GetLayout().get(); |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1841 | |
John Zulauf | 5dfd45c | 2018-01-17 11:06:34 -0700 | [diff] [blame] | 1842 | // Verify dst layout still valid |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1843 | if (dst_layout->Destroyed()) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1844 | *error_code = "VUID-VkCopyDescriptorSet-dstSet-parameter"; |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 1845 | std::ostringstream str; |
| 1846 | str << "Cannot call " << func_name << " to perform copy update on dstSet " << report_data->FormatHandle(dst_set->GetSet()) |
| 1847 | << " created with destroyed " << report_data->FormatHandle(dst_layout->GetDescriptorSetLayout()) << "."; |
| 1848 | *error_msg = str.str(); |
John Zulauf | 5dfd45c | 2018-01-17 11:06:34 -0700 | [diff] [blame] | 1849 | return false; |
| 1850 | } |
| 1851 | |
| 1852 | // Verify src layout still valid |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1853 | if (src_layout->Destroyed()) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1854 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-parameter"; |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 1855 | std::ostringstream str; |
| 1856 | str << "Cannot call " << func_name << " to perform copy update on dstSet " << report_data->FormatHandle(dst_set->GetSet()) |
| 1857 | << " from srcSet " << report_data->FormatHandle(src_set->GetSet()) << " created with destroyed " |
| 1858 | << report_data->FormatHandle(src_layout->GetDescriptorSetLayout()) << "."; |
| 1859 | *error_msg = str.str(); |
John Zulauf | 5dfd45c | 2018-01-17 11:06:34 -0700 | [diff] [blame] | 1860 | return false; |
| 1861 | } |
| 1862 | |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1863 | if (!dst_layout->HasBinding(update->dstBinding)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1864 | *error_code = "VUID-VkCopyDescriptorSet-dstBinding-00347"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1865 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1866 | error_str << "DescriptorSet " << report_data->FormatHandle(dst_set->GetSet()) |
| 1867 | << " does not have copy update dest binding of " << update->dstBinding; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1868 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1869 | return false; |
| 1870 | } |
| 1871 | if (!src_set->HasBinding(update->srcBinding)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1872 | *error_code = "VUID-VkCopyDescriptorSet-srcBinding-00345"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1873 | std::stringstream error_str; |
sourav parmar | f4a7825 | 2020-04-10 13:04:21 -0700 | [diff] [blame] | 1874 | error_str << "DescriptorSet " << report_data->FormatHandle(src_set->GetSet()) |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1875 | << " does not have copy update src binding of " << update->srcBinding; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1876 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1877 | return false; |
| 1878 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1879 | // Verify idle ds |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 1880 | if (dst_set->InUse() && |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1881 | !(dst_layout->GetDescriptorBindingFlagsFromBinding(update->dstBinding) & |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1882 | (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) { |
aitor-lunarg | 9f85900 | 2022-01-27 19:33:35 +0100 | [diff] [blame] | 1883 | *error_code = "VUID-vkUpdateDescriptorSets-None-03047"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1884 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1885 | error_str << "Cannot call " << func_name << " to perform copy update on descriptor set " |
| 1886 | << report_data->FormatHandle(dst_set->GetSet()) << " that is in use by a command buffer"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1887 | *error_msg = error_str.str(); |
| 1888 | return false; |
| 1889 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1890 | // src & dst set bindings are valid |
| 1891 | // Check bounds of src & dst |
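| | // e.g. (illustrative): if srcBinding's global range starts at index 8 in a set with 16 descriptors total, |
| | // srcArrayElement=4 and descriptorCount=6 give 8 + 4 + 6 = 18 > 16, which is out of bounds. |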
John Zulauf | c483f44 | 2017-12-15 14:02:06 -0700 | [diff] [blame] | 1892 | auto src_start_idx = src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start + update->srcArrayElement; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1893 | if ((src_start_idx + update->descriptorCount) > src_set->GetTotalDescriptorCount()) { |
| 1894 | // SRC update out of bounds |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1895 | *error_code = "VUID-VkCopyDescriptorSet-srcArrayElement-00346"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1896 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1897 | error_str << "Attempting copy update from descriptorSet " << report_data->FormatHandle(update->srcSet) << " binding#" |
| 1898 | << update->srcBinding << " with offset index of " |
| 1899 | << src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start << " plus update array offset of " |
| 1900 | << update->srcArrayElement << " and update of " << update->descriptorCount |
Tobin Ehlis | 1d81edd | 2016-11-21 09:50:49 -0700 | [diff] [blame] | 1901 | << " descriptors oversteps total number of descriptors in set: " << src_set->GetTotalDescriptorCount(); |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1902 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1903 | return false; |
| 1904 | } |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1905 | auto dst_start_idx = dst_layout->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement; |
| 1906 | if ((dst_start_idx + update->descriptorCount) > dst_layout->GetTotalDescriptorCount()) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1907 | // DST update out of bounds |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1908 | *error_code = "VUID-VkCopyDescriptorSet-dstArrayElement-00348"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1909 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1910 | error_str << "Attempting copy update to descriptorSet " << report_data->FormatHandle(dst_set->GetSet()) << " binding#" |
| 1911 | << update->dstBinding << " with offset index of " |
| 1912 | << dst_layout->GetGlobalIndexRangeFromBinding(update->dstBinding).start << " plus update array offset of " |
| 1913 | << update->dstArrayElement << " and update of " << update->descriptorCount |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1914 | << " descriptors oversteps total number of descriptors in set: " << dst_layout->GetTotalDescriptorCount(); |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1915 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1916 | return false; |
| 1917 | } |
| 1918 | // Check that types match |
Shannon McPherson | afe5512 | 2020-05-25 16:20:19 -0600 | [diff] [blame] | 1919 | // TODO : The base default error case from here on is "VUID-VkAcquireNextImageInfoKHR-semaphore-parameter", which covers all |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 1920 | // consistency issues; need more fine-grained error codes |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1921 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-00349"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1922 | auto src_type = src_set->GetTypeFromBinding(update->srcBinding); |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1923 | auto dst_type = dst_layout->GetTypeFromBinding(update->dstBinding); |
Ricardo Garcia | 14f4f76 | 2021-04-13 11:36:12 +0200 | [diff] [blame] | 1924 | if (src_type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE && dst_type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE && src_type != dst_type) { |
sourav parmar | f4a7825 | 2020-04-10 13:04:21 -0700 | [diff] [blame] | 1925 | *error_code = "VUID-VkCopyDescriptorSet-dstBinding-02632"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1926 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1927 | error_str << "Attempting copy update to descriptorSet " << report_data->FormatHandle(dst_set->GetSet()) << " binding #" |
| 1928 | << update->dstBinding << " with type " << string_VkDescriptorType(dst_type) << " from descriptorSet " |
| 1929 | << report_data->FormatHandle(src_set->GetSet()) << " binding #" << update->srcBinding << " with type " |
| 1930 | << string_VkDescriptorType(src_type) << ". Types do not match"; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1931 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1932 | return false; |
| 1933 | } |
| 1934 | // Verify consistency of src & dst bindings if update crosses binding boundaries |
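| | // A copy may only roll over into consecutive bindings that match the starting binding in descriptor type, |
| | // stage flags, binding flags and immutable-sampler use; VerifyUpdateConsistency fills *error_msg otherwise. |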
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1935 | if ((!VerifyUpdateConsistency(report_data, DescriptorSetLayout::ConstBindingIterator(src_layout, update->srcBinding), |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 1936 | update->srcArrayElement, update->descriptorCount, "copy update from", src_set->GetSet(), |
| 1937 | error_msg)) || |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1938 | (!VerifyUpdateConsistency(report_data, DescriptorSetLayout::ConstBindingIterator(dst_layout, update->dstBinding), |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1939 | update->dstArrayElement, update->descriptorCount, "copy update to", dst_set->GetSet(), |
| 1940 | error_msg))) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1941 | return false; |
| 1942 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1943 | |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1944 | if ((src_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT) && |
| 1945 | !(dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1946 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-01918"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1947 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1948 | error_str << "If pname:srcSet's (" << report_data->FormatHandle(update->srcSet) |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1949 | << ") layout was created with the " |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1950 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag " |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1951 | "set, then pname:dstSet's (" |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1952 | << report_data->FormatHandle(update->dstSet) |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1953 | << ") layout must: also have been created with the " |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1954 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag set"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1955 | *error_msg = error_str.str(); |
| 1956 | return false; |
| 1957 | } |
| 1958 | |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 1959 | if (IsExtEnabled(device_extensions.vk_valve_mutable_descriptor_type)) { |
Mike Schuchardt | 294a159 | 2021-05-12 15:38:00 -0700 | [diff] [blame] | 1960 | if (!(src_layout->GetCreateFlags() & (VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT | |
| 1961 | VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE)) && |
| 1962 | (dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT)) { |
| 1963 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-04885"; |
| 1964 | std::stringstream error_str; |
| 1965 | error_str << "If pname:srcSet's (" << report_data->FormatHandle(update->srcSet) |
| 1966 | << ") layout was created with neither ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT nor " |
| 1967 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE flags set, then pname:dstSet's (" |
| 1968 | << report_data->FormatHandle(update->dstSet) |
| 1969 | << ") layout must: have been created without the " |
| 1970 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag set"; |
| 1971 | *error_msg = error_str.str(); |
| 1972 | return false; |
| 1973 | } |
| 1974 | } else { |
| 1975 | if (!(src_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT) && |
| 1976 | (dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT)) { |
| 1977 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-04886"; |
| 1978 | std::stringstream error_str; |
| 1979 | error_str << "If pname:srcSet's (" << report_data->FormatHandle(update->srcSet) |
| 1980 | << ") layout was created without the ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag " |
| 1981 | "set, then pname:dstSet's (" |
| 1982 | << report_data->FormatHandle(update->dstSet) |
| 1983 | << ") layout must: also have been created without the " |
| 1984 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag set"; |
| 1985 | *error_msg = error_str.str(); |
| 1986 | return false; |
| 1987 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1988 | } |
| 1989 | |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1990 | if ((src_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT) && |
| 1991 | !(dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1992 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-01920"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1993 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1994 | error_str << "If the descriptor pool from which pname:srcSet (" << report_data->FormatHandle(update->srcSet) |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1995 | << ") was allocated was created " |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 1996 | "with the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag " |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1997 | "set, then the descriptor pool from which pname:dstSet (" |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 1998 | << report_data->FormatHandle(update->dstSet) |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1999 | << ") was allocated must: " |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 2000 | "also have been created with the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2001 | *error_msg = error_str.str(); |
| 2002 | return false; |
| 2003 | } |
| 2004 | |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 2005 | if (IsExtEnabled(device_extensions.vk_valve_mutable_descriptor_type)) { |
Mike Schuchardt | 294a159 | 2021-05-12 15:38:00 -0700 | [diff] [blame] | 2006 | if (!(src_set->GetPoolState()->createInfo.flags & |
| 2007 | (VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT | VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE)) && |
| 2008 | (dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT)) { |
| 2009 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-04887"; |
| 2010 | std::stringstream error_str; |
| 2011 | error_str << "If the descriptor pool from which pname:srcSet (" << report_data->FormatHandle(update->srcSet) |
| 2012 | << ") was allocated was created with neither ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT nor " |
| 2013 | "ename:VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE flags set, then the descriptor pool from which " |
| 2014 | "pname:dstSet (" |
| 2015 | << report_data->FormatHandle(update->dstSet) |
| 2016 | << ") was allocated must: have been created without the " |
| 2017 | "ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set"; |
| 2018 | *error_msg = error_str.str(); |
| 2019 | return false; |
| 2020 | } |
| 2021 | } else { |
| 2022 | if (!(src_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT) && |
| 2023 | (dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT)) { |
| 2024 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-04888"; |
| 2025 | std::stringstream error_str; |
| 2026 | error_str << "If the descriptor pool from which pname:srcSet (" << report_data->FormatHandle(update->srcSet) |
| 2027 | << ") was allocated was created without the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set, " |
| 2028 | "then the descriptor pool from which pname:dstSet (" |
| 2029 | << report_data->FormatHandle(update->dstSet) |
| 2030 | << ") was allocated must: also have been created without the " |
| 2031 | "ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set"; |
| 2032 | *error_msg = error_str.str(); |
| 2033 | return false; |
| 2034 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2035 | } |
| 2036 | |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 2037 | if (src_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) { |
| 2038 | if ((update->srcArrayElement % 4) != 0) { |
| 2039 | *error_code = "VUID-VkCopyDescriptorSet-srcBinding-02223"; |
| 2040 | std::stringstream error_str; |
| 2041 | error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with " |
| 2042 | << "srcArrayElement " << update->srcArrayElement << " not a multiple of 4"; |
| 2043 | *error_msg = error_str.str(); |
| 2044 | return false; |
| 2045 | } |
| 2046 | if ((update->dstArrayElement % 4) != 0) { |
| 2047 | *error_code = "VUID-VkCopyDescriptorSet-dstBinding-02224"; |
| 2048 | std::stringstream error_str; |
| 2049 | error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with " |
| 2050 | << "dstArrayElement " << update->dstArrayElement << " not a multiple of 4"; |
| 2051 | *error_msg = error_str.str(); |
| 2052 | return false; |
| 2053 | } |
| 2054 | if ((update->descriptorCount % 4) != 0) { |
| 2055 | *error_code = "VUID-VkCopyDescriptorSet-srcBinding-02225"; |
| 2056 | std::stringstream error_str; |
| 2057 | error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with " |
| 2058 | << "descriptorCount " << update->descriptorCount << " not a multiple of 4"; |
| 2059 | *error_msg = error_str.str(); |
| 2060 | return false; |
| 2061 | } |
| 2062 | } |
| 2063 | |
ziga-lunarg | e5d2854 | 2021-10-24 21:14:25 +0200 | [diff] [blame] | 2064 | if (dst_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2065 | if (src_type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2066 | if (!dst_layout->IsTypeMutable(src_type, update->dstBinding)) { |
| 2067 | *error_code = "VUID-VkCopyDescriptorSet-dstSet-04612"; |
| 2068 | std::stringstream error_str; |
| 2069 | error_str << "Attempting copy update with dstBinding descriptor type VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, but the new " |
| 2070 | "active descriptor type " |
| 2071 | << string_VkDescriptorType(src_type) |
| 2072 | << " is not in the corresponding pMutableDescriptorTypeLists list."; |
| 2073 | *error_msg = error_str.str(); |
| 2074 | return false; |
| 2075 | } |
| 2076 | } |
| 2077 | } else if (src_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2078 | const auto *descriptor = src_set->GetDescriptorFromGlobalIndex(update->srcBinding); |
| 2079 | if (descriptor->active_descriptor_type != dst_type) { |
| 2080 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-04613"; |
| 2081 | std::stringstream error_str; |
| 2082 | error_str << "Attempting copy update with srcBinding descriptor type VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, but the " |
| 2083 | "active descriptor type (" |
| 2084 | << string_VkDescriptorType(descriptor->active_descriptor_type) |
| 2085 | << ") does not match the dstBinding descriptor type " << string_VkDescriptorType(dst_type) << "."; |
| 2086 | *error_msg = error_str.str(); |
| 2087 | return false; |
| 2088 | } |
| 2089 | } |
| 2090 | |
| 2091 | if (dst_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2092 | if (src_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2093 | const auto &mutable_src_types = src_layout->GetMutableTypes(update->srcBinding); |
| 2094 | const auto &mutable_dst_types = dst_layout->GetMutableTypes(update->dstBinding); |
| 2095 | bool complete_match = mutable_src_types.size() == mutable_dst_types.size(); |
| 2096 | if (complete_match) { |
| 2097 | for (const auto mutable_src_type : mutable_src_types) { |
| 2098 | if (std::find(mutable_dst_types.begin(), mutable_dst_types.end(), mutable_src_type) == |
| 2099 | mutable_dst_types.end()) { |
| 2100 | complete_match = false; |
| 2101 | break; |
| 2102 | } |
| 2103 | } |
| 2104 | } |
| 2105 | if (!complete_match) { |
| 2106 | *error_code = "VUID-VkCopyDescriptorSet-dstSet-04614"; |
| 2107 | std::stringstream error_str; |
| 2108 | error_str << "Attempting copy update with dstBinding and new active descriptor type being " |
| 2109 | "VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, but their corresponding pMutableDescriptorTypeLists do not match."; |
| 2110 | *error_msg = error_str.str(); |
| 2111 | return false; |
| 2112 | } |
| 2113 | } |
| 2114 | } |
| 2115 | |
ziga-lunarg | 5b2bede | 2021-11-08 11:54:17 +0100 | [diff] [blame] | 2116 |     // Resolve mutable descriptor types to their active descriptor types before verifying the update contents |
| 2117 | if (src_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2118 | src_type = src_set->GetDescriptorFromGlobalIndex(update->srcBinding)->active_descriptor_type; |
| 2119 | } |
| 2120 | if (dst_type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 2121 | dst_type = dst_set->GetDescriptorFromGlobalIndex(update->dstBinding)->active_descriptor_type; |
| 2122 | } |
| 2123 | |
Tobin Ehlis | d41e7b6 | 2016-05-19 07:56:18 -0600 | [diff] [blame] | 2124 |     // Update parameters all look good, so verify the update contents |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 2125 | if (!VerifyCopyUpdateContents(update, src_set, src_type, src_start_idx, dst_set, dst_type, dst_start_idx, func_name, error_code, |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2126 | error_msg)) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 2127 | return false; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2128 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2129 | |
| 2130 | // All checks passed so update is good |
| 2131 | return true; |
| 2132 | } |
| 2133 | // Perform Copy update |
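| | // Copies the source descriptors' state into this set, marks the set as updated, and bumps change_count_. |
| | // Unless the destination binding was created with UPDATE_UNUSED_WHILE_PENDING or UPDATE_AFTER_BIND, |
| | // command buffers bound to this set are invalidated. |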
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 2134 | void cvdescriptorset::DescriptorSet::PerformCopyUpdate(ValidationStateTracker *dev_data, const VkCopyDescriptorSet *update, |
| 2135 | const DescriptorSet *src_set) { |
John Zulauf | c483f44 | 2017-12-15 14:02:06 -0700 | [diff] [blame] | 2136 | auto src_start_idx = src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start + update->srcArrayElement; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2137 | auto dst_start_idx = layout_->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2138 | // Update parameters all look good so perform update |
| 2139 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Jeremy Gebben | 9d0dda3 | 2022-01-28 10:16:56 -0700 | [diff] [blame] | 2140 | auto *src = src_set->descriptors_[src_start_idx + di].get(); |
| 2141 | auto *dst = descriptors_[dst_start_idx + di].get(); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 2142 | if (src->updated) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2143 | dst->CopyUpdate(this, state_data_, src); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 2144 | some_update_ = true; |
Jeff Bolz | dd4cfa1 | 2019-08-11 20:57:51 -0500 | [diff] [blame] | 2145 | change_count_++; |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 2146 | } else { |
| 2147 | dst->updated = false; |
| 2148 | } |
ziga | 7a255fb | 2021-11-20 21:17:07 +0100 | [diff] [blame] | 2149 | dst->SetDescriptorType(src); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2150 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2151 | |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2152 | if (!(layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) & |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 2153 | (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) { |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 2154 | Invalidate(false); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2155 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2156 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2157 | |
John Zulauf | 6f3d2bd | 2018-10-29 17:08:42 -0600 | [diff] [blame] | 2158 | // Update the drawing state for the affected descriptors. |
| 2159 | // Link cb_node to this set and this set to cb_node. |
| 2160 | // Add the bindings of the descriptor set. |
| 2161 | // Set the layout based on the current descriptor layout (will mask subsequent layout mismatch errors) |
| 2162 | // TODO: Modify the UpdateDrawState virtual functions to *only* set initial layout and not change layouts |
Tobin Ehlis | f951910 | 2016-08-17 09:49:13 -0600 | [diff] [blame] | 2163 | // Prereq: This should be called for a set that has been confirmed to be active for the given cb_node, meaning it's going |
| 2164 | // to be used in a draw by the given cb_node |
Jeremy Kniager | e682743 | 2020-04-01 09:05:56 -0600 | [diff] [blame] | 2165 | void cvdescriptorset::DescriptorSet::UpdateDrawState(ValidationStateTracker *device_data, CMD_BUFFER_STATE *cb_node, |
| 2166 | CMD_TYPE cmd_type, const PIPELINE_STATE *pipe, |
sfricke-samsung | 85584a7 | 2021-09-30 21:43:38 -0700 | [diff] [blame] | 2167 | const BindingReqMap &binding_req_map) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2168 |     // Descriptor UpdateDrawState only calls image layout validation callbacks. If that validation is disabled, skip the entire loop. |
| 2169 | if (device_data->disabled[image_layout_validation]) { |
Jeff Bolz | e18e724 | 2019-08-12 20:55:22 -0500 | [diff] [blame] | 2170 | return; |
| 2171 | } |
| 2172 | |
Tobin Ehlis | f951910 | 2016-08-17 09:49:13 -0600 | [diff] [blame] | 2173 | // For the active slots, use set# to look up descriptorSet from boundDescriptorSets, and bind all of that descriptor set's |
| 2174 | // resources |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 2175 | CMD_BUFFER_STATE::CmdDrawDispatchInfo cmd_info = {}; |
John Zulauf | 79f0658 | 2021-02-27 18:38:39 -0700 | [diff] [blame] | 2176 | for (const auto &binding_req_pair : binding_req_map) { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2177 | auto index = layout_->GetIndexFromBinding(binding_req_pair.first); |
locke-g | b3ce08f | 2019-09-30 12:30:56 -0600 | [diff] [blame] | 2178 | |
Tony-LunarG | 62c5dba | 2018-12-20 14:27:23 -0700 | [diff] [blame] | 2179 | // We aren't validating descriptors created with PARTIALLY_BOUND or UPDATE_AFTER_BIND, so don't record state |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2180 | auto flags = layout_->GetDescriptorBindingFlagsFromIndex(index); |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 2181 | if (flags & (VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT)) { |
| 2182 | if (!(flags & VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT)) { |
locke-lunarg | 3604599 | 2020-08-20 16:54:37 -0600 | [diff] [blame] | 2183 | cmd_info.binding_infos.emplace_back(binding_req_pair); |
locke-g | b3ce08f | 2019-09-30 12:30:56 -0600 | [diff] [blame] | 2184 | } |
Tony-LunarG | 62c5dba | 2018-12-20 14:27:23 -0700 | [diff] [blame] | 2185 | continue; |
| 2186 | } |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2187 | auto range = layout_->GetGlobalIndexRangeFromIndex(index); |
John Zulauf | c483f44 | 2017-12-15 14:02:06 -0700 | [diff] [blame] | 2188 | for (uint32_t i = range.start; i < range.end; ++i) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2189 | const auto descriptor_class = descriptors_[i]->GetClass(); |
| 2190 | switch (descriptor_class) { |
| 2191 | case DescriptorClass::Image: |
| 2192 | case DescriptorClass::ImageSampler: { |
| 2193 | auto *image_desc = static_cast<ImageDescriptor *>(descriptors_[i].get()); |
| 2194 | image_desc->UpdateDrawState(device_data, cb_node); |
| 2195 | break; |
| 2196 | } |
| 2197 | default: |
| 2198 | break; |
| 2199 | } |
Mark Lobodzinski | 2872f4a | 2018-09-03 17:00:53 -0600 | [diff] [blame] | 2200 | } |
| 2201 | } |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 2202 | |
| 2203 | if (cmd_info.binding_infos.size() > 0) { |
| 2204 | cmd_info.cmd_type = cmd_type; |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 2205 | if (cb_node->activeFramebuffer) { |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 2206 | cmd_info.framebuffer = cb_node->activeFramebuffer->framebuffer(); |
locke-lunarg | fc78e93 | 2020-11-19 17:06:24 -0700 | [diff] [blame] | 2207 | cmd_info.attachments = cb_node->active_attachments; |
| 2208 | cmd_info.subpasses = cb_node->active_subpasses; |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 2209 | } |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 2210 | cb_node->validate_descriptorsets_in_queuesubmit[GetSet()].emplace_back(cmd_info); |
locke-lunarg | 540b225 | 2020-08-03 13:23:36 -0600 | [diff] [blame] | 2211 | } |
Mark Lobodzinski | 2872f4a | 2018-09-03 17:00:53 -0600 | [diff] [blame] | 2212 | } |
| 2213 | |
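| | // Helper for FilterBindingReqs: forward a binding requirement unless it has already been validated. |
| | // Once 'bindings' covers every binding of this type (size >= limit), nothing more needs to be re-validated. |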
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2214 | void cvdescriptorset::DescriptorSet::FilterOneBindingReq(const BindingReqMap::value_type &binding_req_pair, BindingReqMap *out_req, |
| 2215 | const TrackedBindings &bindings, uint32_t limit) { |
| 2216 | if (bindings.size() < limit) { |
| 2217 | const auto it = bindings.find(binding_req_pair.first); |
| 2218 | if (it == bindings.cend()) out_req->emplace(binding_req_pair); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2219 | } |
| 2220 | } |
Mark Lobodzinski | 2872f4a | 2018-09-03 17:00:53 -0600 | [diff] [blame] | 2221 | |
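| | // Reduce in_req to the subset of bindings that still need validation for this command buffer and pipeline, |
| | // based on what has already been recorded in the command buffer's descriptorset_cache. |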
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2222 | void cvdescriptorset::DescriptorSet::FilterBindingReqs(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline, |
| 2223 | const BindingReqMap &in_req, BindingReqMap *out_req) const { |
| 2224 | // For const cleanliness we have to find in the maps... |
Jeremy Gebben | 87db52f | 2021-10-14 13:55:09 -0600 | [diff] [blame] | 2225 | const auto validated_it = cb_state.descriptorset_cache.find(this); |
| 2226 | if (validated_it == cb_state.descriptorset_cache.end()) { |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2227 | // We have nothing validated, copy in to out |
| 2228 | for (const auto &binding_req_pair : in_req) { |
| 2229 | out_req->emplace(binding_req_pair); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2230 | } |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2231 | return; |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2232 | } |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2233 | const auto &validated = validated_it->second; |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2234 | |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2235 | const auto image_sample_version_it = validated.image_samplers.find(&pipeline); |
| 2236 | const VersionedBindings *image_sample_version = nullptr; |
| 2237 | if (image_sample_version_it != validated.image_samplers.cend()) { |
| 2238 | image_sample_version = &(image_sample_version_it->second); |
| 2239 | } |
| 2240 | const auto &dynamic_buffers = validated.dynamic_buffers; |
| 2241 | const auto &non_dynamic_buffers = validated.non_dynamic_buffers; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2242 | const auto &stats = layout_->GetBindingTypeStats(); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2243 | for (const auto &binding_req_pair : in_req) { |
| 2244 | auto binding = binding_req_pair.first; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2245 | VkDescriptorSetLayoutBinding const *layout_binding = layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2246 | if (!layout_binding) { |
| 2247 | continue; |
| 2248 | } |
| 2249 |         // Caching criteria differ per type. |
| 2250 |         // If the image layout has changed, the image descriptors need to be revalidated against it. |
sfricke-samsung | 60f29ec | 2021-03-10 20:37:25 -0800 | [diff] [blame] | 2251 | if (IsBufferDescriptor(layout_binding->descriptorType)) { |
| 2252 | if (IsDynamicDescriptor(layout_binding->descriptorType)) { |
| 2253 | FilterOneBindingReq(binding_req_pair, out_req, dynamic_buffers, stats.dynamic_buffer_count); |
| 2254 | } else { |
| 2255 | FilterOneBindingReq(binding_req_pair, out_req, non_dynamic_buffers, stats.non_dynamic_buffer_count); |
| 2256 | } |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2257 | } else { |
| 2258 | // This is rather crude, as the changed layouts may not impact the bound descriptors, |
| 2259 |             // but the simple "versioning" serves as a cheap "dirty" check. |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2260 | bool stale = true; |
| 2261 | if (image_sample_version) { |
| 2262 | const auto version_it = image_sample_version->find(binding); |
| 2263 | if (version_it != image_sample_version->cend() && (version_it->second == cb_state.image_layout_change_count)) { |
| 2264 | stale = false; |
| 2265 | } |
| 2266 | } |
| 2267 | if (stale) { |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2268 | out_req->emplace(binding_req_pair); |
| 2269 | } |
| 2270 | } |
| 2271 | } |
| 2272 | } |
Tobin Ehlis | 9252c2b | 2016-07-21 14:40:22 -0600 | [diff] [blame] | 2273 | |
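| | // Record the bindings just validated so that FilterBindingReqs can skip them on subsequent draws: |
| | // buffer bindings are cached by binding number, image/sampler bindings against the image layout change count. |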
Jeremy Gebben | 87db52f | 2021-10-14 13:55:09 -0600 | [diff] [blame] | 2274 | void cvdescriptorset::DescriptorSet::UpdateValidationCache(CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline, |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2275 | const BindingReqMap &updated_bindings) { |
Jeremy Gebben | 87db52f | 2021-10-14 13:55:09 -0600 | [diff] [blame] | 2276 | auto &validated = cb_state.descriptorset_cache[this]; |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2277 | |
| 2278 | auto &image_sample_version = validated.image_samplers[&pipeline]; |
| 2279 | auto &dynamic_buffers = validated.dynamic_buffers; |
| 2280 | auto &non_dynamic_buffers = validated.non_dynamic_buffers; |
| 2281 | for (const auto &binding_req_pair : updated_bindings) { |
| 2282 | auto binding = binding_req_pair.first; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2283 | VkDescriptorSetLayoutBinding const *layout_binding = layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding); |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2284 | if (!layout_binding) { |
| 2285 | continue; |
| 2286 | } |
| 2287 |         // Caching criteria differ per type. |
sfricke-samsung | 60f29ec | 2021-03-10 20:37:25 -0800 | [diff] [blame] | 2288 | if (IsBufferDescriptor(layout_binding->descriptorType)) { |
| 2289 | if (IsDynamicDescriptor(layout_binding->descriptorType)) { |
| 2290 | dynamic_buffers.emplace(binding); |
| 2291 | } else { |
| 2292 | non_dynamic_buffers.emplace(binding); |
| 2293 | } |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2294 | } else { |
| 2295 | // Save the layout change version... |
| 2296 | image_sample_version[binding] = cb_state.image_layout_change_count; |
| 2297 | } |
| 2298 | } |
| 2299 | } |
| 2300 | |
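| | // An immutable sampler is baked into the descriptor set layout, so the descriptor is treated as written |
| | // (updated = true) as soon as it is constructed. |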
John Zulauf | d2c3dae | 2019-12-12 11:02:17 -0700 | [diff] [blame] | 2301 | cvdescriptorset::SamplerDescriptor::SamplerDescriptor(const ValidationStateTracker *dev_data, const VkSampler *immut) |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2302 | : Descriptor(PlainSampler), immutable_(false) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2303 | if (immut) { |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 2304 | sampler_state_ = dev_data->GetConstCastShared<SAMPLER_STATE>(*immut); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2305 | immutable_ = true; |
| 2306 | updated = true; |
| 2307 | } |
| 2308 | } |
Tobin Ehlis | e2f8029 | 2016-06-02 10:08:53 -0600 | [diff] [blame] | 2309 | // Validate given sampler. Currently this only checks that a SAMPLER_STATE entry still exists for it |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 2310 | bool CoreChecks::ValidateSampler(const VkSampler sampler) const { return Get<SAMPLER_STATE>(sampler).get() != nullptr; } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2311 | |
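| | // Validate the image/view half of a descriptor write: memory binding, depth-sliced 3D views, aspect mask vs |
| | // format vs layout, usage flags, allowed image layouts per descriptor type, identity swizzle for storage |
| | // images and input attachments, and a zero minLod for input attachments. |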
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2312 | bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image_layout, VkDescriptorType type, |
John Zulauf | bd9b341 | 2019-08-22 17:16:11 -0600 | [diff] [blame] | 2313 | const char *func_name, std::string *error_code, std::string *error_msg) const { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 2314 | auto iv_state = Get<IMAGE_VIEW_STATE>(image_view); |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2315 | assert(iv_state); |
| 2316 | |
Tobin Ehlis | 8128096 | 2016-07-20 14:04:20 -0600 | [diff] [blame] | 2317 |     // Note that when an imageview is created, we validated that memory is bound, so there is no need to re-check here |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2318 | // Validate that imageLayout is compatible with aspect_mask and image format |
| 2319 | // and validate that image usage bits are correct for given usage |
Jeremy Gebben | b4d1701 | 2021-07-08 13:18:15 -0600 | [diff] [blame] | 2320 | VkImageAspectFlags aspect_mask = iv_state->normalized_subresource_range.aspectMask; |
Tobin Ehlis | 8b26a38 | 2016-09-14 08:02:49 -0600 | [diff] [blame] | 2321 | VkImage image = iv_state->create_info.image; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2322 | VkFormat format = VK_FORMAT_MAX_ENUM; |
| 2323 | VkImageUsageFlags usage = 0; |
Jeremy Gebben | 057f9d5 | 2021-11-05 14:12:31 -0600 | [diff] [blame] | 2324 | auto *image_node = iv_state->image_state.get(); |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2325 | assert(image_node); |
Chris Forbes | 67757ff | 2017-07-21 13:59:01 -0700 | [diff] [blame] | 2326 | |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2327 | format = image_node->createInfo.format; |
| 2328 | usage = image_node->createInfo.usage; |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 2329 | const auto stencil_usage_info = LvlFindInChain<VkImageStencilUsageCreateInfo>(image_node->createInfo.pNext); |
Ricardo Garcia | 3f5984c | 2020-04-09 10:56:34 +0200 | [diff] [blame] | 2330 | if (stencil_usage_info) { |
| 2331 | usage |= stencil_usage_info->stencilUsage; |
| 2332 | } |
Mark Lobodzinski | 03d0006 | 2020-06-15 14:35:45 -0600 | [diff] [blame] | 2333 | |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2334 | // Validate that memory is bound to image |
sfricke-samsung | 52dbd08 | 2021-09-23 11:17:58 -0700 | [diff] [blame] | 2335 | if (ValidateMemoryIsBoundToImage(image_node, func_name, kVUID_Core_Bound_Resource_FreedMemoryAccess)) { |
| 2336 | *error_code = kVUID_Core_Bound_Resource_FreedMemoryAccess; |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2337 | *error_msg = "No memory bound to image."; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2338 | return false; |
| 2339 | } |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2340 | |
| 2341 | // KHR_maintenance1 allows rendering into 2D or 2DArray views which slice a 3D image, |
| 2342 | // but not binding them to descriptor sets. |
Jeremy Gebben | 11a68a3 | 2021-07-29 11:59:22 -0600 | [diff] [blame] | 2343 | if (iv_state->IsDepthSliced()) { |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 2344 | *error_code = "VUID-VkDescriptorImageInfo-imageView-00343"; |
| 2345 | *error_msg = "ImageView must not be a 2D or 2DArray view of a 3D image"; |
| 2346 | return false; |
| 2347 | } |
| 2348 | |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2349 | // TODO : The various image aspect and format checks here are based on general spec language in 11.5 Image Views section under |
| 2350 | // vkCreateImageView(). What's the best way to create unique id for these cases? |
sfricke-samsung | 52dbd08 | 2021-09-23 11:17:58 -0700 | [diff] [blame] | 2351 | *error_code = kVUID_Core_DrawState_InvalidImageView; |
Dave Houlton | 1d2022c | 2017-03-29 11:43:58 -0600 | [diff] [blame] | 2352 | bool ds = FormatIsDepthOrStencil(format); |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2353 | switch (image_layout) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2354 | case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: |
| 2355 | // Only Color bit must be set |
| 2356 | if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) { |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2357 | std::stringstream error_str; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 2358 | error_str |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2359 | << "ImageView (" << report_data->FormatHandle(image_view) |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 2360 | << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but does not have VK_IMAGE_ASPECT_COLOR_BIT set."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2361 | *error_msg = error_str.str(); |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2362 | return false; |
| 2363 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2364 | // format must NOT be DS |
| 2365 | if (ds) { |
| 2366 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2367 | error_str << "ImageView (" << report_data->FormatHandle(image_view) |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2368 | << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but the image format is " |
| 2369 | << string_VkFormat(format) << " which is not a color format."; |
| 2370 | *error_msg = error_str.str(); |
| 2371 | return false; |
| 2372 | } |
| 2373 | break; |
| 2374 | case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: |
| 2375 | case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: |
| 2376 | // Depth or stencil bit must be set, but both must NOT be set |
Tobin Ehlis | bbf3f91 | 2016-06-15 13:03:58 -0600 | [diff] [blame] | 2377 | if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) { |
| 2378 | if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) { |
| 2379 | // both must NOT be set |
| 2380 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2381 | error_str << "ImageView (" << report_data->FormatHandle(image_view) |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 2382 | << ") has both STENCIL and DEPTH aspects set"; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2383 | *error_msg = error_str.str(); |
Tobin Ehlis | bbf3f91 | 2016-06-15 13:03:58 -0600 | [diff] [blame] | 2384 | return false; |
| 2385 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2386 | } else if (!(aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT)) { |
| 2387 | // Neither were set |
| 2388 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2389 | error_str << "ImageView (" << report_data->FormatHandle(image_view) << ") has layout " |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 2390 | << string_VkImageLayout(image_layout) << " but does not have STENCIL or DEPTH aspects set"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2391 | *error_msg = error_str.str(); |
| 2392 | return false; |
Tobin Ehlis | bbf3f91 | 2016-06-15 13:03:58 -0600 | [diff] [blame] | 2393 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2394 | // format must be DS |
| 2395 | if (!ds) { |
| 2396 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2397 | error_str << "ImageView (" << report_data->FormatHandle(image_view) << ") has layout " |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 2398 | << string_VkImageLayout(image_layout) << " but the image format is " << string_VkFormat(format) |
| 2399 | << " which is not a depth/stencil format."; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2400 | *error_msg = error_str.str(); |
| 2401 | return false; |
| 2402 | } |
| 2403 | break; |
| 2404 | default: |
| 2405 |             // For other layouts, if the source is a depth/stencil image, both aspect bits must not be set |
| 2406 | if (ds) { |
| 2407 | if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) { |
| 2408 | if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) { |
| 2409 | // both must NOT be set |
| 2410 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2411 | error_str << "ImageView (" << report_data->FormatHandle(image_view) << ") has layout " |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 2412 | << string_VkImageLayout(image_layout) << " and is using depth/stencil image of format " |
| 2413 | << string_VkFormat(format) |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2414 | << " but it has both STENCIL and DEPTH aspects set, which is illegal. When using a depth/stencil " |
| 2415 | "image in a descriptor set, please only set either VK_IMAGE_ASPECT_DEPTH_BIT or " |
| 2416 | "VK_IMAGE_ASPECT_STENCIL_BIT depending on whether it will be used for depth reads or stencil " |
| 2417 | "reads respectively."; |
Mark Lobodzinski | 4d05d7a | 2019-06-25 09:12:06 -0600 | [diff] [blame] | 2418 | *error_code = "VUID-VkDescriptorImageInfo-imageView-01976"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2419 | *error_msg = error_str.str(); |
| 2420 | return false; |
| 2421 | } |
| 2422 | } |
| 2423 | } |
| 2424 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2425 | } |
| 2426 | // Now validate that usage flags are correctly set for given type of update |
Tobin Ehlis | fb4cf71 | 2016-10-10 14:02:48 -0600 | [diff] [blame] | 2427 | // As we're switching per-type, if any type has specific layout requirements, check those here as well |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2428 | // TODO : The various image usage bit requirements are in general spec language for VkImageUsageFlags bit block in 11.3 Images |
| 2429 | // under vkCreateImage() |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 2430 | const char *error_usage_bit = nullptr; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2431 | switch (type) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2432 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
sfricke-samsung | 088f124 | 2020-06-06 02:15:35 -0700 | [diff] [blame] | 2433 | if (iv_state->samplerConversion != VK_NULL_HANDLE) { |
| 2434 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-01946"; |
| 2435 | std::stringstream error_str; |
| 2436 | error_str << "ImageView (" << report_data->FormatHandle(image_view) << ")" |
| 2437 |                           << " used as a VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE can't be created with a VkSamplerYcbcrConversion"; |
| 2438 | *error_msg = error_str.str(); |
| 2439 | return false; |
| 2440 | } |
| 2441 |             // fall through |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2442 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: { |
| 2443 | if (!(usage & VK_IMAGE_USAGE_SAMPLED_BIT)) { |
| 2444 | error_usage_bit = "VK_IMAGE_USAGE_SAMPLED_BIT"; |
sfricke-samsung | 7923f21 | 2020-02-29 21:17:35 -0800 | [diff] [blame] | 2445 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00337"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2446 | } |
| 2447 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2448 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2449 | case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: { |
| 2450 | if (!(usage & VK_IMAGE_USAGE_STORAGE_BIT)) { |
| 2451 | error_usage_bit = "VK_IMAGE_USAGE_STORAGE_BIT"; |
sfricke-samsung | 7923f21 | 2020-02-29 21:17:35 -0800 | [diff] [blame] | 2452 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00339"; |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 2453 | } else if ((VK_IMAGE_LAYOUT_GENERAL != image_layout) && |
| 2454 | (!IsExtEnabled(device_extensions.vk_khr_shared_presentable_image) || |
| 2455 | (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR != image_layout))) { |
sfricke-samsung | f105898 | 2020-09-10 22:36:49 -0700 | [diff] [blame] | 2456 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-04152"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2457 | std::stringstream error_str; |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2458 | error_str << "Descriptor update with descriptorType VK_DESCRIPTOR_TYPE_STORAGE_IMAGE" |
| 2459 | << " is being updated with invalid imageLayout " << string_VkImageLayout(image_layout) << " for image " |
| 2460 | << report_data->FormatHandle(image) << " in imageView " << report_data->FormatHandle(image_view) |
| 2461 | << ". Allowed layouts are: VK_IMAGE_LAYOUT_GENERAL"; |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 2462 | if (IsExtEnabled(device_extensions.vk_khr_shared_presentable_image)) { |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2463 | error_str << " or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR"; |
Tobin Ehlis | bb03e5f | 2017-05-11 08:52:51 -0600 | [diff] [blame] | 2464 | } |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2465 | *error_msg = error_str.str(); |
| 2466 | return false; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2467 | } |
| 2468 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2469 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2470 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: { |
| 2471 | if (!(usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) { |
| 2472 | error_usage_bit = "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT"; |
sfricke-samsung | 7923f21 | 2020-02-29 21:17:35 -0800 | [diff] [blame] | 2473 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00338"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2474 | } |
| 2475 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2476 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2477 | default: |
| 2478 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2479 | } |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 2480 | if (error_usage_bit) { |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2481 | std::stringstream error_str; |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2482 | error_str << "ImageView (" << report_data->FormatHandle(image_view) << ") with usage mask " << std::hex << std::showbase |
| 2483 | << usage << " being used for a descriptor update of type " << string_VkDescriptorType(type) << " does not have " |
| 2484 | << error_usage_bit << " set."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2485 | *error_msg = error_str.str(); |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 2486 | return false; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2487 | } |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2488 | |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2489 |     // All the following types share the same set of valid image layouts; |
| 2490 |     // the check for Storage Images is handled above |
| 2491 | if ((type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) || (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) || |
| 2492 | (type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)) { |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2493 | // Test that the layout is compatible with the descriptorType for the two sampled image types |
| 2494 | const static std::array<VkImageLayout, 3> valid_layouts = { |
Jeremy Hayes | d0549f6 | 2019-06-05 10:15:36 -0600 | [diff] [blame] | 2495 | {VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL}}; |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2496 | |
| 2497 | struct ExtensionLayout { |
| 2498 | VkImageLayout layout; |
Tony-LunarG | 2ec96bb | 2019-11-26 13:43:02 -0700 | [diff] [blame] | 2499 | ExtEnabled DeviceExtensions::*extension; |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2500 | }; |
Ricardo Garcia | 602c202 | 2021-07-30 10:42:17 +0200 | [diff] [blame] | 2501 | const static std::array<ExtensionLayout, 7> extended_layouts{{ |
Jeremy Gebben | 579aaca | 2021-02-15 13:36:18 -0700 | [diff] [blame] | 2502 | // Note double brace req'd for aggregate initialization |
| 2503 | {VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, &DeviceExtensions::vk_khr_shared_presentable_image}, |
| 2504 | {VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, &DeviceExtensions::vk_khr_maintenance2}, |
| 2505 | {VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, &DeviceExtensions::vk_khr_maintenance2}, |
Mike Schuchardt | c57de4a | 2021-07-20 17:26:32 -0700 | [diff] [blame] | 2506 | {VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR, &DeviceExtensions::vk_khr_synchronization2}, |
| 2507 | {VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR, &DeviceExtensions::vk_khr_synchronization2}, |
Ricardo Garcia | 602c202 | 2021-07-30 10:42:17 +0200 | [diff] [blame] | 2508 | {VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, &DeviceExtensions::vk_khr_separate_depth_stencil_layouts}, |
| 2509 | {VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, &DeviceExtensions::vk_khr_separate_depth_stencil_layouts}, |
Jeremy Gebben | 579aaca | 2021-02-15 13:36:18 -0700 | [diff] [blame] | 2510 | }}; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2511 | auto is_layout = [image_layout, this](const ExtensionLayout &ext_layout) { |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 2512 | return IsExtEnabled(device_extensions.*(ext_layout.extension)) && (ext_layout.layout == image_layout); |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2513 | }; |
| 2514 | |
| 2515 | bool valid_layout = (std::find(valid_layouts.cbegin(), valid_layouts.cend(), image_layout) != valid_layouts.cend()) || |
| 2516 | std::any_of(extended_layouts.cbegin(), extended_layouts.cend(), is_layout); |
| 2517 | |
| 2518 | if (!valid_layout) { |
sfricke-samsung | f105898 | 2020-09-10 22:36:49 -0700 | [diff] [blame] | 2519 | // The following works as currently all 3 descriptor types share the same set of valid layouts |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2520 | switch (type) { |
| 2521 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
Shannon McPherson | 2c793ba | 2020-08-28 12:13:24 -0600 | [diff] [blame] | 2522 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-04149"; |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2523 | break; |
| 2524 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: |
Shannon McPherson | 2c793ba | 2020-08-28 12:13:24 -0600 | [diff] [blame] | 2525 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-04150"; |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2526 | break; |
| 2527 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: |
sfricke-samsung | f105898 | 2020-09-10 22:36:49 -0700 | [diff] [blame] | 2528 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-04151"; |
sfricke-samsung | ada55a1 | 2020-08-15 03:39:41 -0700 | [diff] [blame] | 2529 | break; |
| 2530 | default: |
| 2531 | break; |
| 2532 | } |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2533 | std::stringstream error_str; |
| 2534 | error_str << "Descriptor update with descriptorType " << string_VkDescriptorType(type) |
Mark Lobodzinski | 74eddba | 2019-06-21 14:16:33 -0600 | [diff] [blame] | 2535 | << " is being updated with invalid imageLayout " << string_VkImageLayout(image_layout) << " for image " |
Mark Lobodzinski | 298f0fd | 2020-04-09 11:50:19 -0600 | [diff] [blame] | 2536 | << report_data->FormatHandle(image) << " in imageView " << report_data->FormatHandle(image_view) |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2537 | << ". Allowed layouts are: VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, " |
| 2538 | << "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL"; |
| 2539 | for (auto &ext_layout : extended_layouts) { |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 2540 | if (IsExtEnabled(device_extensions.*(ext_layout.extension))) { |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 2541 | error_str << ", " << string_VkImageLayout(ext_layout.layout); |
| 2542 | } |
| 2543 | } |
| 2544 | *error_msg = error_str.str(); |
| 2545 | return false; |
| 2546 | } |
| 2547 | } |
| 2548 | |
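| |     // Storage image and input attachment descriptors must use an image view with an identity component swizzle |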
sfricke-samsung | bd0e805 | 2020-06-06 01:36:39 -0700 | [diff] [blame] | 2549 | if ((type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) || (type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)) { |
| 2550 | const VkComponentMapping components = iv_state->create_info.components; |
| 2551 | if (IsIdentitySwizzle(components) == false) { |
| 2552 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00336"; |
| 2553 | std::stringstream error_str; |
| 2554 |             error_str << "ImageView (" << report_data->FormatHandle(image_view) << ") has a non-identity swizzle component," |
| 2555 | << " r swizzle = " << string_VkComponentSwizzle(components.r) << "," |
| 2556 | << " g swizzle = " << string_VkComponentSwizzle(components.g) << "," |
| 2557 | << " b swizzle = " << string_VkComponentSwizzle(components.b) << "," |
| 2558 | << " a swizzle = " << string_VkComponentSwizzle(components.a) << "."; |
| 2559 | *error_msg = error_str.str(); |
| 2560 | return false; |
| 2561 | } |
| 2562 | } |
| 2563 | |
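| |     // Input attachment descriptors must not use an image view that was created with a non-zero minLod |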
Tony-LunarG | 69604c4 | 2021-11-22 16:00:12 -0700 | [diff] [blame] | 2564 | if ((type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) && (iv_state->min_lod != 0.0f)) { |
| 2565 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-06450"; |
| 2566 | std::stringstream error_str; |
| 2567 | error_str << "ImageView (" << report_data->FormatHandle(image_view) |
| 2568 |                   << ") is written to a descriptor of type VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT with a minLod (" << iv_state->min_lod |
| 2569 | << ") that is not 0.0"; |
| 2570 | *error_msg = error_str.str(); |
| 2571 | return false; |
| 2572 | } |
| 2573 | |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2574 | return true; |
| 2575 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2576 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2577 | // Helper template to change shared pointer members of a Descriptor, while |
| 2578 | // correctly managing links to the parent DescriptorSet. |
| 2579 | // src and dst are shared pointers. |
| 2580 | template <typename T> |
| 2581 | static void ReplaceStatePtr(DescriptorSet *set_state, T &dst, const T &src) { |
| 2582 | if (dst) { |
| 2583 | dst->RemoveParent(set_state); |
| 2584 | } |
| 2585 | dst = src; |
| 2586 | if (dst) { |
| 2587 | dst->AddParent(set_state); |
| 2588 | } |
| 2589 | } |
| 2590 | |
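| | // Write a sampler update into this descriptor; immutable samplers are fixed by the set layout, so only the |
| | // updated flag changes in that case. |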
| 2591 | void cvdescriptorset::SamplerDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2592 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Chris Forbes | fea2c54 | 2018-04-13 09:34:15 -0700 | [diff] [blame] | 2593 | if (!immutable_) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2594 |         ReplaceStatePtr(set_state, sampler_state_, dev_data->GetConstCastShared<SAMPLER_STATE>(update->pImageInfo[index].sampler)); |
Chris Forbes | fea2c54 | 2018-04-13 09:34:15 -0700 | [diff] [blame] | 2595 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2596 | updated = true; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2597 | } |
| 2598 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2599 | void cvdescriptorset::SamplerDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2600 | const Descriptor *src) { |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2601 | updated = true; |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2602 | if (src->descriptor_class == Mutable) { |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2603 | auto *sampler_src = static_cast<const MutableDescriptor *>(src); |
| 2604 | if (!immutable_) { |
| 2605 | ReplaceStatePtr(set_state, sampler_state_, sampler_src->GetSharedSamplerState()); |
| 2606 | } |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2607 | return; |
| 2608 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2609 | auto *sampler_src = static_cast<const SamplerDescriptor *>(src); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2610 | if (!immutable_) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2611 | ReplaceStatePtr(set_state, sampler_state_, sampler_src->sampler_state_); |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 2612 | } |
| 2613 | } |
| 2614 | |
John Zulauf | d2c3dae | 2019-12-12 11:02:17 -0700 | [diff] [blame] | 2615 | cvdescriptorset::ImageSamplerDescriptor::ImageSamplerDescriptor(const ValidationStateTracker *dev_data, const VkSampler *immut) |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2616 | : ImageDescriptor(ImageSampler), immutable_(false) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2617 | if (immut) { |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 2618 | sampler_state_ = dev_data->GetConstCastShared<SAMPLER_STATE>(*immut); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2619 | immutable_ = true; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2620 | } |
| 2621 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2622 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2623 | void cvdescriptorset::ImageSamplerDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
John Zulauf | d2c3dae | 2019-12-12 11:02:17 -0700 | [diff] [blame] | 2624 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2625 | updated = true; |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2626 | const auto &image_info = update->pImageInfo[index]; |
Chris Forbes | fea2c54 | 2018-04-13 09:34:15 -0700 | [diff] [blame] | 2627 | if (!immutable_) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2628 | ReplaceStatePtr(set_state, sampler_state_, dev_data->GetConstCastShared<SAMPLER_STATE>(image_info.sampler)); |
Chris Forbes | fea2c54 | 2018-04-13 09:34:15 -0700 | [diff] [blame] | 2629 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2630 | image_layout_ = image_info.imageLayout; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2631 | ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView)); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2632 | } |
| 2633 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2634 | void cvdescriptorset::ImageSamplerDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2635 | const Descriptor *src) { |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2636 | updated = true; |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2637 | if (src->descriptor_class == Mutable) { |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2638 | auto *image_src = static_cast<const MutableDescriptor *>(src); |
| 2639 | if (!immutable_) { |
| 2640 | ReplaceStatePtr(set_state, sampler_state_, image_src->GetSharedSamplerState()); |
| 2641 | } |
| 2642 | ImageDescriptor::CopyUpdate(set_state, dev_data, src); |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2643 | return; |
| 2644 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2645 | auto *image_src = static_cast<const ImageSamplerDescriptor *>(src); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2646 | if (!immutable_) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2647 | ReplaceStatePtr(set_state, sampler_state_, image_src->sampler_state_); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2648 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2649 | ImageDescriptor::CopyUpdate(set_state, dev_data, src); |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 2650 | } |
| 2651 | |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2652 | cvdescriptorset::ImageDescriptor::ImageDescriptor(const VkDescriptorType type) |
| 2653 | : Descriptor(Image), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {} |
| 2654 | |
| 2655 | cvdescriptorset::ImageDescriptor::ImageDescriptor(DescriptorClass class_) |
| 2656 | : Descriptor(class_), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {} |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2657 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2658 | void cvdescriptorset::ImageDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2659 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2660 | updated = true; |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2661 | const auto &image_info = update->pImageInfo[index]; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2662 | image_layout_ = image_info.imageLayout; |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2663 | ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView)); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2664 | } |
| 2665 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2666 | void cvdescriptorset::ImageDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2667 | const Descriptor *src) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2668 | updated = true; |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2669 | if (src->descriptor_class == Mutable) { |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2670 | auto *image_src = static_cast<const MutableDescriptor *>(src); |
| 2671 | |
| 2672 | image_layout_ = image_src->GetImageLayout(); |
| 2673 | ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState()); |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2674 | return; |
| 2675 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2676 | auto *image_src = static_cast<const ImageDescriptor *>(src); |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2677 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2678 | image_layout_ = image_src->image_layout_; |
| 2679 | ReplaceStatePtr(set_state, image_view_state_, image_src->image_view_state_); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2680 | } |
| 2681 | |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2682 | void cvdescriptorset::ImageDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) { |
Tobin Ehlis | 81e4637 | 2016-08-17 13:33:44 -0600 | [diff] [blame] | 2683 | // Add binding for image |
Jeff Bolz | faffeb3 | 2019-10-04 12:47:16 -0500 | [diff] [blame] | 2684 | auto iv_state = GetImageViewState(); |
Tobin Ehlis | 8b26a38 | 2016-09-14 08:02:49 -0600 | [diff] [blame] | 2685 | if (iv_state) { |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2686 | dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *iv_state, image_layout_); |
Jeff Bolz | 148d94e | 2018-12-13 21:25:56 -0600 | [diff] [blame] | 2687 | } |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 2688 | } |
| 2689 | |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2690 | cvdescriptorset::BufferDescriptor::BufferDescriptor(const VkDescriptorType type) |
| 2691 | : Descriptor(GeneralBuffer), offset_(0), range_(0) {} |
| 2692 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2693 | void cvdescriptorset::BufferDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2694 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2695 | updated = true; |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2696 | const auto &buffer_info = update->pBufferInfo[index]; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2697 | offset_ = buffer_info.offset; |
| 2698 | range_ = buffer_info.range; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2699 | ReplaceStatePtr(set_state, buffer_state_, dev_data->GetConstCastShared<BUFFER_STATE>(buffer_info.buffer)); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2700 | } |
| 2701 | |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2702 | void cvdescriptorset::BufferDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2703 | const Descriptor *src) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2704 | updated = true; |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2705 | if (src->descriptor_class == Mutable) { |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2706 | const auto buff_desc = static_cast<const MutableDescriptor *>(src); |
| 2707 | offset_ = buff_desc->GetOffset(); |
| 2708 | range_ = buff_desc->GetRange(); |
| 2709 | ReplaceStatePtr(set_state, buffer_state_, buff_desc->GetSharedBufferState()); |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2710 | return; |
| 2711 | } |
Karl Schultz | 76d16a4 | 2020-11-11 05:05:33 -0700 | [diff] [blame] | 2712 | const auto buff_desc = static_cast<const BufferDescriptor *>(src); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2713 | offset_ = buff_desc->offset_; |
| 2714 | range_ = buff_desc->range_; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2715 | ReplaceStatePtr(set_state, buffer_state_, buff_desc->buffer_state_); |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 2716 | } |
| 2717 | |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2718 | cvdescriptorset::TexelDescriptor::TexelDescriptor(const VkDescriptorType type) : Descriptor(TexelBuffer) {} |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2719 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2720 | void cvdescriptorset::TexelDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2721 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2722 | updated = true; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2723 | ReplaceStatePtr(set_state, buffer_view_state_, |
| 2724 | dev_data->GetConstCastShared<BUFFER_VIEW_STATE>(update->pTexelBufferView[index])); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 2725 | } |
| 2726 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2727 | void cvdescriptorset::TexelDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2728 | const Descriptor *src) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2729 | updated = true; |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2730 | if (src->descriptor_class == Mutable) { |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2731 | ReplaceStatePtr(set_state, buffer_view_state_, static_cast<const MutableDescriptor *>(src)->GetSharedBufferViewState()); |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2732 | return; |
| 2733 | } |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2734 | ReplaceStatePtr(set_state, buffer_view_state_, static_cast<const TexelDescriptor *>(src)->buffer_view_state_); |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 2735 | } |
| 2736 | |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2737 | cvdescriptorset::AccelerationStructureDescriptor::AccelerationStructureDescriptor(const VkDescriptorType type) |
Jeremy Gebben | 059ab50 | 2021-04-26 11:25:02 -0600 | [diff] [blame] | 2738 | : Descriptor(AccelerationStructure), acc_(VK_NULL_HANDLE), acc_nv_(VK_NULL_HANDLE) { |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2739 | is_khr_ = false; |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2740 | } |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2741 | void cvdescriptorset::AccelerationStructureDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2742 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 2743 | const auto *acc_info = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureKHR>(update->pNext); |
| 2744 | const auto *acc_info_nv = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureNV>(update->pNext); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2745 | assert(acc_info || acc_info_nv); |
| 2746 | is_khr_ = (acc_info != NULL); |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2747 | updated = true; |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2748 | if (is_khr_) { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2749 | acc_ = acc_info->pAccelerationStructures[index]; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2750 | ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_)); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2751 | } else { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 2752 | acc_nv_ = acc_info_nv->pAccelerationStructures[index]; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2753 | ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_)); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2754 | } |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2755 | } |
| 2756 | |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2757 | void cvdescriptorset::AccelerationStructureDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2758 | const Descriptor *src) { |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2759 | updated = true; |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2760 | if (src->descriptor_class == Mutable) { |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2761 | auto acc_desc = static_cast<const MutableDescriptor *>(src); |
| 2762 | if (is_khr_) { |
| 2763 | acc_ = acc_desc->GetAccelerationStructure(); |
| 2764 | ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_)); |
| 2765 | } else { |
| 2766 | acc_nv_ = acc_desc->GetAccelerationStructureNV(); |
| 2767 | ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_)); |
| 2768 | } |
Tony-LunarG | 8035832 | 2021-04-16 07:58:13 -0600 | [diff] [blame] | 2769 | return; |
| 2770 | } |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2771 | auto acc_desc = static_cast<const AccelerationStructureDescriptor *>(src); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2772 | if (is_khr_) { |
| 2773 | acc_ = acc_desc->acc_; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2774 | ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_)); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2775 | } else { |
| 2776 | acc_nv_ = acc_desc->acc_nv_; |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2777 | ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_)); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 2778 | } |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 2779 | } |
| 2780 | |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2781 | cvdescriptorset::MutableDescriptor::MutableDescriptor() |
| 2782 | : Descriptor(Mutable), |
| 2783 | buffer_size_(0), |
| 2784 | immutable_(false), |
| 2785 | image_layout_(VK_IMAGE_LAYOUT_UNDEFINED), |
| 2786 | offset_(0), |
| 2787 | range_(0), |
| 2788 | is_khr_(false), |
| 2789 | acc_(VK_NULL_HANDLE), |
| 2790 | acc_nv_(VK_NULL_HANDLE) { |
ziga | 7a255fb | 2021-11-20 21:17:07 +0100 | [diff] [blame] | 2791 | active_descriptor_class_ = NoDescriptorClass; |
| 2792 | } |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 2793 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2794 | void cvdescriptorset::MutableDescriptor::WriteUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2795 | const VkWriteDescriptorSet *update, const uint32_t index) { |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 2796 | updated = true; |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2797 | if (update->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) { |
| 2798 | if (!immutable_) { |
| 2799 | ReplaceStatePtr(set_state, sampler_state_, |
| 2800 | dev_data->GetConstCastShared<SAMPLER_STATE>(update->pImageInfo[index].sampler)); |
| 2801 | } |
| 2802 | } else if (update->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) { |
| 2803 | const auto &image_info = update->pImageInfo[index]; |
| 2804 | if (!immutable_) { |
| 2805 | ReplaceStatePtr(set_state, sampler_state_, dev_data->GetConstCastShared<SAMPLER_STATE>(image_info.sampler)); |
| 2806 | } |
| 2807 | image_layout_ = image_info.imageLayout; |
| 2808 | ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView)); |
| 2809 | } else if (update->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE || |
| 2810 | update->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE || |
| 2811 | update->descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) { |
| 2812 | const auto &image_info = update->pImageInfo[index]; |
| 2813 | image_layout_ = image_info.imageLayout; |
| 2814 | ReplaceStatePtr(set_state, image_view_state_, dev_data->GetConstCastShared<IMAGE_VIEW_STATE>(image_info.imageView)); |
| 2815 | } else if (update->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || |
| 2816 | update->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || |
| 2817 | update->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC || |
| 2818 | update->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) { |
| 2819 | const auto &buffer_info = update->pBufferInfo[index]; |
| 2820 | offset_ = buffer_info.offset; |
| 2821 | range_ = buffer_info.range; |
| 2822 | ReplaceStatePtr(set_state, buffer_state_, dev_data->GetConstCastShared<BUFFER_STATE>(buffer_info.buffer)); |
| 2823 | } else if (update->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER || |
| 2824 | update->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER) { |
| 2825 | ReplaceStatePtr(set_state, buffer_view_state_, |
| 2826 | dev_data->GetConstCastShared<BUFFER_VIEW_STATE>(update->pTexelBufferView[index])); |
| 2827 | } else if (update->descriptorType == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR) { |
| 2828 | const auto *acc_info = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureKHR>(update->pNext); |
| 2829 | const auto *acc_info_nv = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureNV>(update->pNext); |
| 2830 | assert(acc_info || acc_info_nv); |
| 2831 | is_khr_ = (acc_info != NULL); |
| 2832 | updated = true; |
| 2833 | if (is_khr_) { |
| 2834 | acc_ = acc_info->pAccelerationStructures[index]; |
| 2835 | ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_)); |
| 2836 | } else { |
| 2837 | acc_nv_ = acc_info_nv->pAccelerationStructures[index]; |
| 2838 | ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_)); |
| 2839 | } |
| 2840 | } |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 2841 | } |
| 2842 | |
Jeremy Gebben | 5570abe | 2021-05-16 18:35:13 -0600 | [diff] [blame] | 2843 | void cvdescriptorset::MutableDescriptor::CopyUpdate(DescriptorSet *set_state, const ValidationStateTracker *dev_data, |
| 2844 | const Descriptor *src) { |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 2845 | updated = true; |
ziga | da4b151 | 2021-11-28 15:53:06 +0100 | [diff] [blame^] | 2846 | if (src->descriptor_class == DescriptorClass::PlainSampler) { |
| 2847 | auto *sampler_src = static_cast<const SamplerDescriptor *>(src); |
| 2848 | if (!immutable_) { |
| 2849 | ReplaceStatePtr(set_state, sampler_state_, sampler_src->GetSharedSamplerState()); |
| 2850 | } |
| 2851 | } else if (src->descriptor_class == DescriptorClass::ImageSampler) { |
| 2852 | auto *image_src = static_cast<const ImageSamplerDescriptor *>(src); |
| 2853 | if (!immutable_) { |
| 2854 | ReplaceStatePtr(set_state, sampler_state_, image_src->GetSharedSamplerState()); |
| 2855 | } |
| 2856 | |
| 2857 | image_layout_ = image_src->GetImageLayout(); |
| 2858 | ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState()); |
| 2859 | } else if (src->descriptor_class == DescriptorClass::Image) { |
| 2860 | auto *image_src = static_cast<const ImageDescriptor *>(src); |
| 2861 | |
| 2862 | image_layout_ = image_src->GetImageLayout(); |
| 2863 | ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState()); |
| 2864 | } else if (src->descriptor_class == DescriptorClass::TexelBuffer) { |
| 2865 | ReplaceStatePtr(set_state, buffer_view_state_, static_cast<const TexelDescriptor *>(src)->GetSharedBufferViewState()); |
| 2866 | } else if (src->descriptor_class == DescriptorClass::GeneralBuffer) { |
| 2867 | const auto buff_desc = static_cast<const BufferDescriptor *>(src); |
| 2868 | offset_ = buff_desc->GetOffset(); |
| 2869 | range_ = buff_desc->GetRange(); |
| 2870 | ReplaceStatePtr(set_state, buffer_state_, buff_desc->GetSharedBufferState()); |
| 2871 | } else if (src->descriptor_class == DescriptorClass::AccelerationStructure) { |
| 2872 | auto acc_desc = static_cast<const AccelerationStructureDescriptor *>(src); |
| 2873 | if (is_khr_) { |
| 2874 | acc_ = acc_desc->GetAccelerationStructure(); |
| 2875 | ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_)); |
| 2876 | } else { |
| 2877 | acc_nv_ = acc_desc->GetAccelerationStructureNV(); |
| 2878 | ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_)); |
| 2879 | } |
| 2880 | } else if (src->descriptor_class == DescriptorClass::Mutable) { |
| 2881 | if (src->active_descriptor_type == VK_DESCRIPTOR_TYPE_SAMPLER) { |
| 2882 | auto *sampler_src = static_cast<const MutableDescriptor *>(src); |
| 2883 | if (!immutable_) { |
| 2884 | ReplaceStatePtr(set_state, sampler_state_, sampler_src->GetSharedSamplerState()); |
| 2885 | } |
| 2886 | } else if (src->active_descriptor_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) { |
| 2887 | auto *image_src = static_cast<const MutableDescriptor *>(src); |
| 2888 | if (!immutable_) { |
| 2889 | ReplaceStatePtr(set_state, sampler_state_, image_src->GetSharedSamplerState()); |
| 2890 | } |
| 2891 | |
| 2892 | image_layout_ = image_src->GetImageLayout(); |
| 2893 | ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState()); |
| 2894 | } else if (src->active_descriptor_type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE || |
| 2895 | src->active_descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE || |
| 2896 | src->active_descriptor_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) { |
| 2897 | auto *image_src = static_cast<const MutableDescriptor *>(src); |
| 2898 | |
| 2899 | image_layout_ = image_src->GetImageLayout(); |
| 2900 | ReplaceStatePtr(set_state, image_view_state_, image_src->GetSharedImageViewState()); |
| 2901 | } else if (src->active_descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || |
| 2902 | src->active_descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || |
| 2903 | src->active_descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC || |
| 2904 | src->active_descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) { |
| 2905 | const auto buff_desc = static_cast<const MutableDescriptor *>(src); |
| 2906 | offset_ = buff_desc->GetOffset(); |
| 2907 | range_ = buff_desc->GetRange(); |
| 2908 | ReplaceStatePtr(set_state, buffer_state_, buff_desc->GetSharedBufferState()); |
| 2909 | } else if (src->active_descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER || |
| 2910 | src->active_descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER) { |
| 2911 | ReplaceStatePtr(set_state, buffer_view_state_, static_cast<const MutableDescriptor *>(src)->GetSharedBufferViewState()); |
| 2912 | } else if (src->active_descriptor_type == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR || |
| 2913 | src->active_descriptor_type == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV) { |
| 2914 | auto acc_desc = static_cast<const MutableDescriptor *>(src); |
| 2915 | if (is_khr_) { |
| 2916 | acc_ = acc_desc->GetAccelerationStructure(); |
| 2917 | ReplaceStatePtr(set_state, acc_state_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE_KHR>(acc_)); |
| 2918 | } else { |
| 2919 | acc_nv_ = acc_desc->GetAccelerationStructureNV(); |
| 2920 | ReplaceStatePtr(set_state, acc_state_nv_, dev_data->GetConstCastShared<ACCELERATION_STRUCTURE_STATE>(acc_nv_)); |
| 2921 | } |
| 2922 | } |
| 2923 | } |
| 2924 | } |
| 2925 | |
| 2926 | bool cvdescriptorset::MutableDescriptor::AddParent(BASE_NODE *base_node) { |
| 2927 | bool result = false; |
| 2928 | if (active_descriptor_type == VK_DESCRIPTOR_TYPE_SAMPLER) { |
| 2929 | if (sampler_state_) { |
| 2930 | result |= sampler_state_->AddParent(base_node); |
| 2931 | } |
| 2932 | } else if (active_descriptor_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) { |
| 2933 | if (sampler_state_) { |
| 2934 | result |= sampler_state_->AddParent(base_node); |
| 2935 | } |
| 2936 | if (image_view_state_) { |
 | 2937 |             result |= image_view_state_->AddParent(base_node); 
| 2938 | } |
| 2939 | } else if (active_descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER || |
| 2940 | active_descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER) { |
| 2941 | if (buffer_view_state_) { |
| 2942 | result = buffer_view_state_->AddParent(base_node); |
| 2943 | } |
| 2944 | } else if (active_descriptor_type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE || |
| 2945 | active_descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE || |
| 2946 | active_descriptor_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) { |
| 2947 | if (image_view_state_) { |
| 2948 | result = image_view_state_->AddParent(base_node); |
| 2949 | } |
| 2950 | } else if (active_descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || |
| 2951 | active_descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || |
| 2952 | active_descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC || |
| 2953 | active_descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) { |
| 2954 | if (buffer_state_) { |
| 2955 | result = buffer_state_->AddParent(base_node); |
| 2956 | } |
| 2957 | } else if (active_descriptor_type == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR || |
| 2958 | active_descriptor_type == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV) { |
| 2959 | if (acc_state_) { |
| 2960 | result |= acc_state_->AddParent(base_node); |
| 2961 | } |
| 2962 | if (acc_state_nv_) { |
| 2963 | result |= acc_state_nv_->AddParent(base_node); |
| 2964 | } |
| 2965 | } |
| 2966 | return result; |
| 2967 | } |
| 2968 | void cvdescriptorset::MutableDescriptor::RemoveParent(BASE_NODE *base_node) { |
| 2969 | if (sampler_state_) { |
| 2970 | sampler_state_->RemoveParent(base_node); |
| 2971 | } |
| 2972 | if (image_view_state_) { |
| 2973 | image_view_state_->RemoveParent(base_node); |
| 2974 | } |
| 2975 | if (buffer_view_state_) { |
| 2976 | buffer_view_state_->RemoveParent(base_node); |
| 2977 | } |
| 2978 | if (buffer_state_) { |
| 2979 | buffer_state_->RemoveParent(base_node); |
| 2980 | } |
| 2981 | if (acc_state_) { |
| 2982 | acc_state_->RemoveParent(base_node); |
| 2983 | } |
| 2984 | if (acc_state_nv_) { |
| 2985 | acc_state_nv_->RemoveParent(base_node); |
| 2986 | } |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 2987 | } |
| 2988 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2989 | // This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated |
| 2990 | // sets, and then calls their respective Validate[Write|Copy]Update functions. |
 | 2991 | // If any update hits an issue for which validation determines the call down the chain should 
 | 2992 | // be skipped, then true is returned. 
 | 2993 | // If there is no issue with any of the updates, then false is returned. 
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 2994 | bool CoreChecks::ValidateUpdateDescriptorSets(uint32_t write_count, const VkWriteDescriptorSet *p_wds, uint32_t copy_count, |
Jeff Bolz | 46c0ea0 | 2019-10-09 13:06:29 -0500 | [diff] [blame] | 2995 | const VkCopyDescriptorSet *p_cds, const char *func_name) const { |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 2996 | bool skip = false; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2997 | // Validate Write updates |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 2998 | for (uint32_t i = 0; i < write_count; i++) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2999 | auto dest_set = p_wds[i].dstSet; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3000 | auto set_node = Get<cvdescriptorset::DescriptorSet>(dest_set); |
Tobin Ehlis | 6a72dc7 | 2016-06-01 16:41:17 -0600 | [diff] [blame] | 3001 | if (!set_node) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3002 | skip |= LogError(dest_set, kVUID_Core_DrawState_InvalidDescriptorSet, |
| 3003 | "Cannot call %s on %s that has not been allocated in pDescriptorWrites[%u].", func_name, |
| 3004 | report_data->FormatHandle(dest_set).c_str(), i); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3005 | } else { |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 3006 | std::string error_code; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3007 | std::string error_str; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 3008 | if (!ValidateWriteUpdate(set_node.get(), &p_wds[i], func_name, &error_code, &error_str, false)) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3009 | skip |= |
| 3010 | LogError(dest_set, error_code, "%s pDescriptorWrites[%u] failed write update validation for %s with error: %s.", |
| 3011 | func_name, i, report_data->FormatHandle(dest_set).c_str(), error_str.c_str()); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3012 | } |
| 3013 | } |
sourav parmar | a24fb7b | 2020-05-26 10:50:04 -0700 | [diff] [blame] | 3014 | if (p_wds[i].pNext) { |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 3015 | const auto *pnext_struct = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureKHR>(p_wds[i].pNext); |
Mark Lobodzinski | 17dc460 | 2020-05-29 07:48:40 -0600 | [diff] [blame] | 3016 | if (pnext_struct) { |
| 3017 | for (uint32_t j = 0; j < pnext_struct->accelerationStructureCount; ++j) { |
Jeremy Gebben | f444939 | 2022-01-28 10:09:10 -0700 | [diff] [blame] | 3018 | auto as_state = Get<ACCELERATION_STRUCTURE_STATE_KHR>(pnext_struct->pAccelerationStructures[j]); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 3019 | if (as_state && (as_state->create_infoKHR.sType == VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR && |
sourav parmar | 766e2a7 | 2020-12-03 16:17:11 -0800 | [diff] [blame] | 3020 | (as_state->create_infoKHR.type != VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR && |
| 3021 | as_state->create_infoKHR.type != VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR))) { |
sourav parmar | a24fb7b | 2020-05-26 10:50:04 -0700 | [diff] [blame] | 3022 | skip |= |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 3023 | LogError(dest_set, "VUID-VkWriteDescriptorSetAccelerationStructureKHR-pAccelerationStructures-03579", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3024 | "%s: For pDescriptorWrites[%u] acceleration structure in pAccelerationStructures[%u] must " |
| 3025 | "have been created with " |
sourav parmar | bcee751 | 2020-12-28 14:34:49 -0800 | [diff] [blame] | 3026 | "VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR or VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR.", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3027 | func_name, i, j); |
sourav parmar | a24fb7b | 2020-05-26 10:50:04 -0700 | [diff] [blame] | 3028 | } |
| 3029 | } |
| 3030 | } |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 3031 | const auto *pnext_struct_nv = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureNV>(p_wds[i].pNext); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 3032 | if (pnext_struct_nv) { |
| 3033 | for (uint32_t j = 0; j < pnext_struct_nv->accelerationStructureCount; ++j) { |
Jeremy Gebben | f444939 | 2022-01-28 10:09:10 -0700 | [diff] [blame] | 3034 | auto as_state = Get<ACCELERATION_STRUCTURE_STATE>(pnext_struct_nv->pAccelerationStructures[j]); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 3035 | if (as_state && (as_state->create_infoNV.sType == VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV && |
| 3036 | as_state->create_infoNV.info.type != VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV)) { |
| 3037 | skip |= LogError(dest_set, "VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-03748", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3038 | "%s: For pDescriptorWrites[%u] acceleration structure in pAccelerationStructures[%u] must " |
| 3039 | "have been created with" |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 3040 | " VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV.", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3041 | func_name, i, j); |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 3042 | } |
| 3043 | } |
| 3044 | } |
sourav parmar | a24fb7b | 2020-05-26 10:50:04 -0700 | [diff] [blame] | 3045 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3046 | } |
| 3047 | // Now validate copy updates |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 3048 | for (uint32_t i = 0; i < copy_count; ++i) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3049 | auto dst_set = p_cds[i].dstSet; |
| 3050 | auto src_set = p_cds[i].srcSet; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3051 | auto src_node = Get<cvdescriptorset::DescriptorSet>(src_set); |
| 3052 | auto dst_node = Get<cvdescriptorset::DescriptorSet>(dst_set); |
Tobin Ehlis | a171275 | 2017-01-04 09:41:47 -0700 | [diff] [blame] | 3053 |         // Object_tracker verifies that the src & dst descriptor sets are valid 
| 3054 | assert(src_node); |
| 3055 | assert(dst_node); |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 3056 | std::string error_code; |
Tobin Ehlis | a171275 | 2017-01-04 09:41:47 -0700 | [diff] [blame] | 3057 | std::string error_str; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 3058 | if (!ValidateCopyUpdate(&p_cds[i], dst_node.get(), src_node.get(), func_name, &error_code, &error_str)) { |
Mark Lobodzinski | 9d38ea2 | 2020-03-16 18:22:16 -0600 | [diff] [blame] | 3059 | LogObjectList objlist(dst_set); |
| 3060 | objlist.add(src_set); |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3061 | skip |= LogError(objlist, error_code, "%s pDescriptorCopies[%u] failed copy update from %s to %s with error: %s.", |
| 3062 | func_name, i, report_data->FormatHandle(src_set).c_str(), report_data->FormatHandle(dst_set).c_str(), |
| 3063 | error_str.c_str()); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3064 | } |
| 3065 | } |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 3066 | return skip; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3067 | } |
| 3068 | // This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated |
| 3069 | // sets, and then calls their respective Perform[Write|Copy]Update functions. |
| 3070 | // Prerequisite : ValidateUpdateDescriptorSets() should be called and return "false" prior to calling PerformUpdateDescriptorSets() |
| 3071 | // with the same set of updates. |
| 3072 | // This is split from the validate code to allow validation prior to calling down the chain, and then update after |
| 3073 | // calling down the chain. |
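 | | // 
 | | // Illustrative sketch only (assuming the usual chassis PreCallValidate/PostCallRecord split, not a 
 | | // verbatim copy of the dispatch code): a vkUpdateDescriptorSets() interception would pair the two 
 | | // helpers roughly as follows (state_tracker and the count/pointer names are placeholders): 
 | | //     bool skip = ValidateUpdateDescriptorSets(writeCount, pWrites, copyCount, pCopies, "vkUpdateDescriptorSets()"); 
 | | //     if (!skip) { 
 | | //         cvdescriptorset::PerformUpdateDescriptorSets(state_tracker, writeCount, pWrites, copyCount, pCopies); 
 | | //     } 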
John Zulauf | e3b35f3 | 2019-06-25 14:21:21 -0600 | [diff] [blame] | 3074 | void cvdescriptorset::PerformUpdateDescriptorSets(ValidationStateTracker *dev_data, uint32_t write_count, |
| 3075 | const VkWriteDescriptorSet *p_wds, uint32_t copy_count, |
| 3076 | const VkCopyDescriptorSet *p_cds) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3077 | // Write updates first |
| 3078 | uint32_t i = 0; |
| 3079 | for (i = 0; i < write_count; ++i) { |
| 3080 | auto dest_set = p_wds[i].dstSet; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3081 | auto set_node = dev_data->Get<cvdescriptorset::DescriptorSet>(dest_set); |
Tobin Ehlis | 6a72dc7 | 2016-06-01 16:41:17 -0600 | [diff] [blame] | 3082 | if (set_node) { |
Jeff Bolz | 41a1ced | 2019-10-11 11:40:49 -0500 | [diff] [blame] | 3083 | set_node->PerformWriteUpdate(dev_data, &p_wds[i]); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3084 | } |
| 3085 | } |
| 3086 | // Now copy updates |
| 3087 | for (i = 0; i < copy_count; ++i) { |
| 3088 | auto dst_set = p_cds[i].dstSet; |
| 3089 | auto src_set = p_cds[i].srcSet; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3090 | auto src_node = dev_data->Get<cvdescriptorset::DescriptorSet>(src_set); |
| 3091 | auto dst_node = dev_data->Get<cvdescriptorset::DescriptorSet>(dst_set); |
Tobin Ehlis | 6a72dc7 | 2016-06-01 16:41:17 -0600 | [diff] [blame] | 3092 | if (src_node && dst_node) { |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 3093 | dst_node->PerformCopyUpdate(dev_data, &p_cds[i], src_node.get()); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3094 | } |
| 3095 | } |
| 3096 | } |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 3097 | |
John Zulauf | e3b35f3 | 2019-06-25 14:21:21 -0600 | [diff] [blame] | 3098 | cvdescriptorset::DecodedTemplateUpdate::DecodedTemplateUpdate(const ValidationStateTracker *device_data, |
Jeremy Gebben | fc89045 | 2021-10-27 10:56:49 -0600 | [diff] [blame] | 3099 | VkDescriptorSet descriptorSet, |
| 3100 | const UPDATE_TEMPLATE_STATE *template_state, const void *pData, |
| 3101 | VkDescriptorSetLayout push_layout) { |
John Zulauf | b845eb2 | 2018-10-12 11:41:06 -0600 | [diff] [blame] | 3102 | auto const &create_info = template_state->create_info; |
| 3103 | inline_infos.resize(create_info.descriptorUpdateEntryCount); // Make sure we have one if we need it |
sourav parmar | 480d277 | 2021-01-24 22:24:54 -0800 | [diff] [blame] | 3104 | inline_infos_khr.resize(create_info.descriptorUpdateEntryCount); |
| 3105 | inline_infos_nv.resize(create_info.descriptorUpdateEntryCount); |
John Zulauf | b845eb2 | 2018-10-12 11:41:06 -0600 | [diff] [blame] | 3106 | desc_writes.reserve(create_info.descriptorUpdateEntryCount); // emplaced, so reserved without initialization |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 3107 | VkDescriptorSetLayout effective_dsl = create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET |
| 3108 | ? create_info.descriptorSetLayout |
| 3109 | : push_layout; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 3110 | auto layout_obj = device_data->Get<cvdescriptorset::DescriptorSetLayout>(effective_dsl); |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 3111 | |
| 3112 | // Create a WriteDescriptorSet struct for each template update entry |
| 3113 | for (uint32_t i = 0; i < create_info.descriptorUpdateEntryCount; i++) { |
| 3114 | auto binding_count = layout_obj->GetDescriptorCountFromBinding(create_info.pDescriptorUpdateEntries[i].dstBinding); |
| 3115 | auto binding_being_updated = create_info.pDescriptorUpdateEntries[i].dstBinding; |
| 3116 | auto dst_array_element = create_info.pDescriptorUpdateEntries[i].dstArrayElement; |
| 3117 | |
John Zulauf | b6d7120 | 2017-12-22 16:47:09 -0700 | [diff] [blame] | 3118 | desc_writes.reserve(desc_writes.size() + create_info.pDescriptorUpdateEntries[i].descriptorCount); |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 3119 | for (uint32_t j = 0; j < create_info.pDescriptorUpdateEntries[i].descriptorCount; j++) { |
| 3120 | desc_writes.emplace_back(); |
| 3121 | auto &write_entry = desc_writes.back(); |
| 3122 | |
| 3123 | size_t offset = create_info.pDescriptorUpdateEntries[i].offset + j * create_info.pDescriptorUpdateEntries[i].stride; |
| 3124 | char *update_entry = (char *)(pData) + offset; |
| 3125 | |
| 3126 | if (dst_array_element >= binding_count) { |
| 3127 | dst_array_element = 0; |
Mark Lobodzinski | 4aa479d | 2017-03-10 09:14:00 -0700 | [diff] [blame] | 3128 | binding_being_updated = layout_obj->GetNextValidBinding(binding_being_updated); |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 3129 | } |
| 3130 | |
| 3131 | write_entry.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; |
| 3132 | write_entry.pNext = NULL; |
| 3133 | write_entry.dstSet = descriptorSet; |
| 3134 | write_entry.dstBinding = binding_being_updated; |
| 3135 | write_entry.dstArrayElement = dst_array_element; |
| 3136 | write_entry.descriptorCount = 1; |
| 3137 | write_entry.descriptorType = create_info.pDescriptorUpdateEntries[i].descriptorType; |
| 3138 | |
| 3139 | switch (create_info.pDescriptorUpdateEntries[i].descriptorType) { |
| 3140 | case VK_DESCRIPTOR_TYPE_SAMPLER: |
| 3141 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: |
| 3142 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
| 3143 | case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: |
| 3144 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: |
| 3145 | write_entry.pImageInfo = reinterpret_cast<VkDescriptorImageInfo *>(update_entry); |
| 3146 | break; |
| 3147 | |
| 3148 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: |
| 3149 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: |
| 3150 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: |
| 3151 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: |
| 3152 | write_entry.pBufferInfo = reinterpret_cast<VkDescriptorBufferInfo *>(update_entry); |
| 3153 | break; |
| 3154 | |
| 3155 | case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: |
| 3156 | case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: |
| 3157 | write_entry.pTexelBufferView = reinterpret_cast<VkBufferView *>(update_entry); |
| 3158 | break; |
Dave Houlton | 142c4cb | 2018-10-17 15:04:41 -0600 | [diff] [blame] | 3159 | case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: { |
| 3160 | VkWriteDescriptorSetInlineUniformBlockEXT *inline_info = &inline_infos[i]; |
| 3161 | inline_info->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT; |
| 3162 | inline_info->pNext = nullptr; |
| 3163 | inline_info->dataSize = create_info.pDescriptorUpdateEntries[i].descriptorCount; |
| 3164 | inline_info->pData = update_entry; |
| 3165 | write_entry.pNext = inline_info; |
Ricardo Garcia | fee1573 | 2019-05-28 11:13:31 +0200 | [diff] [blame] | 3166 | // descriptorCount must match the dataSize member of the VkWriteDescriptorSetInlineUniformBlockEXT structure |
| 3167 | write_entry.descriptorCount = inline_info->dataSize; |
Dave Houlton | 142c4cb | 2018-10-17 15:04:41 -0600 | [diff] [blame] | 3168 | // skip the rest of the array, they just represent bytes in the update |
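 | |                     // (illustrative note: an inline uniform block template entry with descriptorCount == 16 
 | |                     // contributes 16 bytes of pData and is folded into this single write_entry rather than 
 | |                     // producing 16 separate writes) 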
| 3169 | j = create_info.pDescriptorUpdateEntries[i].descriptorCount; |
| 3170 | break; |
| 3171 | } |
sourav parmar | 480d277 | 2021-01-24 22:24:54 -0800 | [diff] [blame] | 3172 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: { |
| 3173 | VkWriteDescriptorSetAccelerationStructureKHR *inline_info_khr = &inline_infos_khr[i]; |
| 3174 | inline_info_khr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR; |
| 3175 | inline_info_khr->pNext = nullptr; |
| 3176 | inline_info_khr->accelerationStructureCount = create_info.pDescriptorUpdateEntries[i].descriptorCount; |
| 3177 | inline_info_khr->pAccelerationStructures = reinterpret_cast<VkAccelerationStructureKHR *>(update_entry); |
| 3178 | write_entry.pNext = inline_info_khr; |
| 3179 | break; |
| 3180 | } |
| 3181 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV: { |
| 3182 | VkWriteDescriptorSetAccelerationStructureNV *inline_info_nv = &inline_infos_nv[i]; |
| 3183 | inline_info_nv->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV; |
| 3184 | inline_info_nv->pNext = nullptr; |
| 3185 | inline_info_nv->accelerationStructureCount = create_info.pDescriptorUpdateEntries[i].descriptorCount; |
| 3186 | inline_info_nv->pAccelerationStructures = reinterpret_cast<VkAccelerationStructureNV *>(update_entry); |
| 3187 | write_entry.pNext = inline_info_nv; |
| 3188 | break; |
| 3189 | } |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 3190 | default: |
| 3191 | assert(0); |
| 3192 | break; |
| 3193 | } |
| 3194 | dst_array_element++; |
| 3195 | } |
| 3196 | } |
John Zulauf | b845eb2 | 2018-10-12 11:41:06 -0600 | [diff] [blame] | 3197 | } |
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 3198 | // These helper functions carry out the validate and record steps for descriptor updates performed via update templates. They decode 
| 3199 | // the templatized data and leverage the non-template UpdateDescriptor helper functions. |
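 | | // 
 | | // For example (illustrative only), a single VkDescriptorUpdateTemplateEntry with descriptorCount == 3 
 | | // is decoded by DecodedTemplateUpdate above into three VkWriteDescriptorSet entries with 
 | | // descriptorCount == 1, each reading pData at offset + j * stride; inline uniform blocks are the 
 | | // exception and remain a single write carrying all of their bytes. 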
Jeremy Gebben | fc89045 | 2021-10-27 10:56:49 -0600 | [diff] [blame] | 3200 | bool CoreChecks::ValidateUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet, |
| 3201 | const UPDATE_TEMPLATE_STATE *template_state, const void *pData) const { |
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 3202 | // Translate the templated update into a normal update for validation... |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 3203 | cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData); |
| 3204 | return ValidateUpdateDescriptorSets(static_cast<uint32_t>(decoded_update.desc_writes.size()), decoded_update.desc_writes.data(), |
| 3205 | 0, NULL, "vkUpdateDescriptorSetWithTemplate()"); |
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 3206 | } |
John Zulauf | b845eb2 | 2018-10-12 11:41:06 -0600 | [diff] [blame] | 3207 | |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 3208 | std::string cvdescriptorset::DescriptorSet::StringifySetAndLayout() const { |
| 3209 | std::string out; |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 3210 | auto layout_handle = layout_->GetDescriptorSetLayout(); |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 3211 | if (IsPushDescriptor()) { |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 3212 | std::ostringstream str; |
Tony-LunarG | 1d3ee2d | 2020-10-27 15:54:52 -0600 | [diff] [blame] | 3213 | str << "Push Descriptors defined with " << state_data_->report_data->FormatHandle(layout_handle); |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 3214 | out = str.str(); |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 3215 | } else { |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 3216 | std::ostringstream str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3217 | str << state_data_->report_data->FormatHandle(GetSet()) << " allocated with " |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 3218 | << state_data_->report_data->FormatHandle(layout_handle); |
| 3219 | out = str.str(); |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 3220 | } |
| 3221 | return out; |
 | 3222 | } 
| 3223 | |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 3224 | // Loop through the write updates to validate for a push descriptor set, ignoring dstSet |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3225 | bool CoreChecks::ValidatePushDescriptorsUpdate(const DescriptorSet *push_set, uint32_t write_count, |
John Zulauf | bd9b341 | 2019-08-22 17:16:11 -0600 | [diff] [blame] | 3226 | const VkWriteDescriptorSet *p_wds, const char *func_name) const { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 3227 | assert(push_set->IsPushDescriptor()); |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 3228 | bool skip = false; |
| 3229 | for (uint32_t i = 0; i < write_count; i++) { |
| 3230 | std::string error_code; |
| 3231 | std::string error_str; |
ziga-lunarg | 6c46b24 | 2021-09-13 18:33:37 +0200 | [diff] [blame] | 3232 | if (!ValidateWriteUpdate(push_set, &p_wds[i], func_name, &error_code, &error_str, true)) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3233 | skip |= LogError(push_set->GetDescriptorSetLayout(), error_code, |
| 3234 | "%s VkWriteDescriptorSet[%u] failed update validation: %s.", func_name, i, error_str.c_str()); |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 3235 | } |
| 3236 | } |
| 3237 | return skip; |
| 3238 | } |
| 3239 | |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 3240 | // For the given buffer, verify that its creation parameters are appropriate for the given type |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3241 | // If there's an error, update the error_msg string with details and return false, else return true |
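 | | // (e.g., a buffer consumed as a VK_DESCRIPTOR_TYPE_STORAGE_BUFFER descriptor must have been created 
 | | // with VK_BUFFER_USAGE_STORAGE_BUFFER_BIT in VkBufferCreateInfo::usage, otherwise 
 | | // VUID-VkWriteDescriptorSet-descriptorType-00331 is returned in error_code below) 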
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3242 | bool cvdescriptorset::ValidateBufferUsage(debug_report_data *report_data, BUFFER_STATE const *buffer_node, VkDescriptorType type, |
| 3243 | std::string *error_code, std::string *error_msg) { |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 3244 | // Verify that usage bits set correctly for given type |
Tobin Ehlis | 94bc5d2 | 2016-06-02 07:46:52 -0600 | [diff] [blame] | 3245 | auto usage = buffer_node->createInfo.usage; |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 3246 | const char *error_usage_bit = nullptr; |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 3247 | switch (type) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3248 | case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: |
| 3249 | if (!(usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3250 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00334"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3251 | error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT"; |
| 3252 | } |
| 3253 | break; |
| 3254 | case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: |
| 3255 | if (!(usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3256 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00335"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3257 | error_usage_bit = "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT"; |
| 3258 | } |
| 3259 | break; |
| 3260 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: |
| 3261 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: |
| 3262 | if (!(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3263 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00330"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3264 | error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT"; |
| 3265 | } |
| 3266 | break; |
| 3267 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: |
| 3268 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: |
| 3269 | if (!(usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3270 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00331"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3271 | error_usage_bit = "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT"; |
| 3272 | } |
| 3273 | break; |
| 3274 | default: |
| 3275 | break; |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 3276 | } |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 3277 | if (error_usage_bit) { |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 3278 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3279 | error_str << "Buffer (" << report_data->FormatHandle(buffer_node->buffer()) << ") with usage mask " << std::hex |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3280 | << std::showbase << usage << " being used for a descriptor update of type " << string_VkDescriptorType(type) |
| 3281 | << " does not have " << error_usage_bit << " set."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3282 | *error_msg = error_str.str(); |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 3283 | return false; |
| 3284 | } |
| 3285 | return true; |
| 3286 | } |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3287 | // For buffer descriptor updates, verify the buffer usage and the VkDescriptorBufferInfo struct, which includes checking that (a valid case is sketched after this list): 
| 3288 | // 1. buffer is valid |
| 3289 | // 2. buffer was created with correct usage flags |
| 3290 | // 3. offset is less than buffer size |
| 3291 | // 4. range is either VK_WHOLE_SIZE or falls in (0, (buffer size - offset)] |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3292 | // 5. range and offset are within the device's limits |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3293 | // If there's an error, update the error_msg string with details and return false, else return true |
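 | | // 
 | | // Minimal sketch of a VkDescriptorBufferInfo that satisfies rules 1-4 above (my_uniform_buffer is a 
 | | // hypothetical, memory-backed buffer used only for illustration): 
 | | //     VkDescriptorBufferInfo info{}; 
 | | //     info.buffer = my_uniform_buffer; 
 | | //     info.offset = 0;               // must be < the buffer's createInfo.size 
 | | //     info.range  = VK_WHOLE_SIZE;   // or any value in (0, size - offset] 
 | | // Rule 5 is then checked against maxUniformBufferRange / maxStorageBufferRange as applicable. 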
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3294 | bool CoreChecks::ValidateBufferUpdate(VkDescriptorBufferInfo const *buffer_info, VkDescriptorType type, const char *func_name, |
John Zulauf | bd9b341 | 2019-08-22 17:16:11 -0600 | [diff] [blame] | 3295 | std::string *error_code, std::string *error_msg) const { |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3296 | // First make sure that buffer is valid |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3297 | auto buffer_node = Get<BUFFER_STATE>(buffer_info->buffer); |
Tobin Ehlis | fa8b618 | 2016-12-22 13:40:45 -0700 | [diff] [blame] | 3298 | // Any invalid buffer should already be caught by object_tracker |
| 3299 | assert(buffer_node); |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 3300 | if (ValidateMemoryIsBoundToBuffer(buffer_node.get(), func_name, "VUID-VkWriteDescriptorSet-descriptorType-00329")) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3301 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00329"; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3302 | *error_msg = "No memory bound to buffer."; |
Tobin Ehlis | 8128096 | 2016-07-20 14:04:20 -0600 | [diff] [blame] | 3303 | return false; |
Tobin Ehlis | fed999f | 2016-09-21 15:09:45 -0600 | [diff] [blame] | 3304 | } |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3305 | // Verify usage bits |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 3306 | if (!cvdescriptorset::ValidateBufferUsage(report_data, buffer_node.get(), type, error_code, error_msg)) { |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3307 | // error_msg will have been updated by ValidateBufferUsage() |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3308 | return false; |
| 3309 | } |
| 3310 | // offset must be less than buffer size |
Jeremy Hayes | d1a6a82 | 2017-03-09 14:39:45 -0700 | [diff] [blame] | 3311 | if (buffer_info->offset >= buffer_node->createInfo.size) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3312 | *error_code = "VUID-VkDescriptorBufferInfo-offset-00340"; |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3313 | std::stringstream error_str; |
Jeremy Hayes | d1a6a82 | 2017-03-09 14:39:45 -0700 | [diff] [blame] | 3314 | error_str << "VkDescriptorBufferInfo offset of " << buffer_info->offset << " is greater than or equal to buffer " |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3315 | << report_data->FormatHandle(buffer_node->buffer()) << " size of " << buffer_node->createInfo.size; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3316 | *error_msg = error_str.str(); |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3317 | return false; |
| 3318 | } |
| 3319 | if (buffer_info->range != VK_WHOLE_SIZE) { |
| 3320 | // Range must be VK_WHOLE_SIZE or > 0 |
| 3321 | if (!buffer_info->range) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3322 | *error_code = "VUID-VkDescriptorBufferInfo-range-00341"; |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3323 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3324 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3325 | << " VkDescriptorBufferInfo range is not VK_WHOLE_SIZE and is zero, which is not allowed."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3326 | *error_msg = error_str.str(); |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3327 | return false; |
| 3328 | } |
| 3329 | // Range must be VK_WHOLE_SIZE or <= (buffer size - offset) |
| 3330 | if (buffer_info->range > (buffer_node->createInfo.size - buffer_info->offset)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3331 | *error_code = "VUID-VkDescriptorBufferInfo-range-00342"; |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3332 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3333 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) << " VkDescriptorBufferInfo range is " |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3334 | << buffer_info->range << " which is greater than buffer size (" << buffer_node->createInfo.size |
| 3335 | << ") minus requested offset of " << buffer_info->offset; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3336 | *error_msg = error_str.str(); |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3337 | return false; |
| 3338 | } |
| 3339 | } |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3340 | // Check buffer update sizes against device limits |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3341 | const auto &limits = phys_dev_props.limits; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3342 | if (VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type || VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC == type) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 3343 | auto max_ub_range = limits.maxUniformBufferRange; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3344 | if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_ub_range) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3345 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00332"; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3346 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3347 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) << " VkDescriptorBufferInfo range is " |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3348 | << buffer_info->range << " which is greater than this device's maxUniformBufferRange (" << max_ub_range |
| 3349 | << ")"; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3350 | *error_msg = error_str.str(); |
| 3351 | return false; |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 3352 | } else if (buffer_info->range == VK_WHOLE_SIZE && (buffer_node->createInfo.size - buffer_info->offset) > max_ub_range) { |
| 3353 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00332"; |
| 3354 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3355 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3356 | << " VkDescriptorBufferInfo range is VK_WHOLE_SIZE but effective range " |
Peter Kohaut | 18f413d | 2018-07-16 13:15:42 +0200 | [diff] [blame] | 3357 | << "(" << (buffer_node->createInfo.size - buffer_info->offset) << ") is greater than this device's " |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 3358 | << "maxUniformBufferRange (" << max_ub_range << ")"; |
| 3359 | *error_msg = error_str.str(); |
| 3360 | return false; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3361 | } |
| 3362 | } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER == type || VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC == type) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 3363 | auto max_sb_range = limits.maxStorageBufferRange; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3364 | if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_sb_range) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3365 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00333"; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3366 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3367 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) << " VkDescriptorBufferInfo range is " |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3368 | << buffer_info->range << " which is greater than this device's maxStorageBufferRange (" << max_sb_range |
| 3369 | << ")"; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3370 | *error_msg = error_str.str(); |
| 3371 | return false; |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 3372 | } else if (buffer_info->range == VK_WHOLE_SIZE && (buffer_node->createInfo.size - buffer_info->offset) > max_sb_range) { |
| 3373 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00333"; |
| 3374 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3375 | error_str << "For buffer " << report_data->FormatHandle(buffer_node->buffer()) |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3376 | << " VkDescriptorBufferInfo range is VK_WHOLE_SIZE but effective range " |
Peter Kohaut | 18f413d | 2018-07-16 13:15:42 +0200 | [diff] [blame] | 3377 | << "(" << (buffer_node->createInfo.size - buffer_info->offset) << ") is greater than this device's " |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 3378 | << "maxStorageBufferRange (" << max_sb_range << ")"; |
| 3379 | *error_msg = error_str.str(); |
| 3380 | return false; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 3381 | } |
| 3382 | } |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 3383 | return true; |
| 3384 | } |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 3385 | template <typename T> |
| 3386 | bool CoreChecks::ValidateAccelerationStructureUpdate(T acc_node, const char *func_name, std::string *error_code, |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 3387 | std::string *error_msg) const { |
ziga-lunarg | 5f3a091 | 2021-11-06 19:18:45 +0100 | [diff] [blame] | 3388 | // nullDescriptor feature allows this to be VK_NULL_HANDLE |
| 3389 | if (acc_node) { |
| 3390 | if (ValidateMemoryIsBoundToAccelerationStructure(acc_node, func_name, kVUIDUndefined)) { |
| 3391 | *error_code = kVUIDUndefined; |
| 3392 | *error_msg = "No memory bound to acceleration structure."; |
| 3393 | return false; |
| 3394 | } |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 3395 | } |
| 3396 | return true; |
| 3397 | } |
| 3398 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3399 | // Verify that the contents of the update are ok, but don't perform actual update |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3400 | bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, const DescriptorSet *src_set, |
| 3401 | VkDescriptorType src_type, uint32_t src_index, const DescriptorSet *dst_set, |
| 3402 | VkDescriptorType dst_type, uint32_t dst_index, const char *func_name, |
| 3403 | std::string *error_code, std::string *error_msg) const { |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 3404 | // Note : Repurposing some Write update error codes here as specific details aren't called out for copy updates like they are |
| 3405 | // for write updates |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3406 | using DescriptorClass = cvdescriptorset::DescriptorClass; |
| 3407 | using BufferDescriptor = cvdescriptorset::BufferDescriptor; |
| 3408 | using ImageDescriptor = cvdescriptorset::ImageDescriptor; |
| 3409 | using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor; |
| 3410 | using SamplerDescriptor = cvdescriptorset::SamplerDescriptor; |
| 3411 | using TexelDescriptor = cvdescriptorset::TexelDescriptor; |
| 3412 | |
| 3413 | auto device_data = this; |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3414 | |
| 3415 | if (dst_type == VK_DESCRIPTOR_TYPE_SAMPLER) { |
| 3416 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 3417 | const auto dst_desc = dst_set->GetDescriptorFromGlobalIndex(dst_index + di); |
| 3418 | if (!dst_desc->updated) continue; |
| 3419 | if (dst_desc->IsImmutableSampler()) { |
| 3420 | *error_code = "VUID-VkCopyDescriptorSet-dstBinding-02753"; |
| 3421 | std::stringstream error_str; |
| 3422 | error_str << "Attempted copy update to an immutable sampler descriptor."; |
| 3423 | *error_msg = error_str.str(); |
| 3424 | return false; |
| 3425 | } |
| 3426 | } |
| 3427 | } |
| 3428 | |
| 3429 | switch (src_set->GetDescriptorFromGlobalIndex(src_index)->descriptor_class) { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3430 | case DescriptorClass::PlainSampler: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3431 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3432 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(src_index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 3433 | if (!src_desc->updated) continue; |
| 3434 | if (!src_desc->IsImmutableSampler()) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 3435 | auto update_sampler = static_cast<const SamplerDescriptor *>(src_desc)->GetSampler(); |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3436 | if (!ValidateSampler(update_sampler)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3437 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3438 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3439 | error_str << "Attempted copy update to sampler descriptor with invalid sampler: " |
| 3440 | << report_data->FormatHandle(update_sampler) << "."; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3441 | *error_msg = error_str.str(); |
| 3442 | return false; |
| 3443 | } |
| 3444 | } else { |
| 3445 | // TODO : Warn here |
| 3446 | } |
| 3447 | } |
| 3448 | break; |
| 3449 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3450 | case DescriptorClass::ImageSampler: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3451 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3452 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(src_index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 3453 | if (!src_desc->updated) continue; |
| 3454 | auto img_samp_desc = static_cast<const ImageSamplerDescriptor *>(src_desc); |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3455 | // First validate sampler |
| 3456 | if (!img_samp_desc->IsImmutableSampler()) { |
| 3457 | auto update_sampler = img_samp_desc->GetSampler(); |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3458 | if (!ValidateSampler(update_sampler)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 3459 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3460 | std::stringstream error_str; |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3461 | error_str << "Attempted copy update to sampler descriptor with invalid sampler: " |
| 3462 | << report_data->FormatHandle(update_sampler) << "."; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3463 | *error_msg = error_str.str(); |
| 3464 | return false; |
| 3465 | } |
| 3466 | } else { |
| 3467 | // TODO : Warn here |
| 3468 | } |
| 3469 | // Validate image |
| 3470 | auto image_view = img_samp_desc->GetImageView(); |
| 3471 | auto image_layout = img_samp_desc->GetImageLayout(); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3472 | if (image_view) { |
| 3473 | if (!ValidateImageUpdate(image_view, image_layout, src_type, func_name, error_code, error_msg)) { |
| 3474 | std::stringstream error_str; |
| 3475 | error_str << "Attempted copy update to combined image sampler descriptor failed due to: " |
| 3476 | << error_msg->c_str(); |
| 3477 | *error_msg = error_str.str(); |
| 3478 | return false; |
| 3479 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3480 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3481 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3482 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3483 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3484 | case DescriptorClass::Image: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3485 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3486 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(src_index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 3487 | if (!src_desc->updated) continue; |
| 3488 | auto img_desc = static_cast<const ImageDescriptor *>(src_desc); |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3489 | auto image_view = img_desc->GetImageView(); |
| 3490 | auto image_layout = img_desc->GetImageLayout(); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3491 | if (image_view) { |
| 3492 | if (!ValidateImageUpdate(image_view, image_layout, src_type, func_name, error_code, error_msg)) { |
| 3493 | std::stringstream error_str; |
| 3494 | error_str << "Attempted copy update to image descriptor failed due to: " << error_msg->c_str(); |
| 3495 | *error_msg = error_str.str(); |
| 3496 | return false; |
| 3497 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3498 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3499 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3500 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3501 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3502 | case DescriptorClass::TexelBuffer: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3503 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3504 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(src_index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 3505 | if (!src_desc->updated) continue; |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 3506 | auto buffer_view = static_cast<const TexelDescriptor *>(src_desc)->GetBufferView(); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3507 | if (buffer_view) { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3508 | auto bv_state = device_data->Get<BUFFER_VIEW_STATE>(buffer_view); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3509 | if (!bv_state) { |
| 3510 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02994"; |
| 3511 | std::stringstream error_str; |
| 3512 | error_str << "Attempted copy update to texel buffer descriptor with invalid buffer view: " |
| 3513 | << report_data->FormatHandle(buffer_view); |
| 3514 | *error_msg = error_str.str(); |
| 3515 | return false; |
| 3516 | } |
| 3517 | auto buffer = bv_state->create_info.buffer; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 3518 | auto buffer_state = Get<BUFFER_STATE>(buffer); |
| 3519 | if (!cvdescriptorset::ValidateBufferUsage(report_data, buffer_state.get(), src_type, error_code, error_msg)) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3520 | std::stringstream error_str; |
| 3521 | error_str << "Attempted copy update to texel buffer descriptor failed due to: " << error_msg->c_str(); |
| 3522 | *error_msg = error_str.str(); |
| 3523 | return false; |
| 3524 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3525 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3526 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3527 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3528 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3529 | case DescriptorClass::GeneralBuffer: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3530 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 3531 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(src_index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 3532 | if (!src_desc->updated) continue; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 3533 | auto buffer_state = static_cast<const BufferDescriptor *>(src_desc)->GetBufferState(); |
| 3534 | if (buffer_state) { |
| 3535 | if (!cvdescriptorset::ValidateBufferUsage(report_data, buffer_state, src_type, error_code, error_msg)) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3536 | std::stringstream error_str; |
| 3537 | error_str << "Attempted copy update to buffer descriptor failed due to: " << error_msg->c_str(); |
| 3538 | *error_msg = error_str.str(); |
| 3539 | return false; |
| 3540 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3541 | } |
Tobin Ehlis | cbcf234 | 2016-05-24 13:07:12 -0600 | [diff] [blame] | 3542 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3543 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3544 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3545 | case DescriptorClass::InlineUniform: |
| 3546 | case DescriptorClass::AccelerationStructure: |
Ricardo Garcia | 14f4f76 | 2021-04-13 11:36:12 +0200 | [diff] [blame] | 3547 | case DescriptorClass::Mutable: |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 3548 | break; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 3549 | default: |
| 3550 | assert(0); // We've already verified update type so should never get here |
| 3551 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 3552 | } |
| 3553 | // All checks passed so update contents are good |
| 3554 | return true; |
Chris Forbes | b4e0bdb | 2016-05-31 16:34:40 +1200 | [diff] [blame] | 3555 | } |
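// Illustrative sketch (not part of the validation logic): the copy-update checks above guard application
// code that copies descriptors between sets via vkUpdateDescriptorSets, e.g. (handles are hypothetical):
//
//     VkCopyDescriptorSet copy = {};
//     copy.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
//     copy.srcSet = src_set_handle;    // set whose descriptors were previously written
//     copy.srcBinding = 0;
//     copy.srcArrayElement = 0;
//     copy.dstSet = dst_set_handle;    // destination binding must not use immutable samplers for sampler copies
//     copy.dstBinding = 0;
//     copy.dstArrayElement = 0;
//     copy.descriptorCount = 1;
//     vkUpdateDescriptorSets(device, 0, nullptr, 1, &copy);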
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 3556 | // Verify that the state at allocate time is correct, but don't actually allocate the sets yet |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 3557 | bool CoreChecks::ValidateAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info, |
Jeff Bolz | 46c0ea0 | 2019-10-09 13:06:29 -0500 | [diff] [blame] | 3558 | const cvdescriptorset::AllocateDescriptorSetsData *ds_data) const { |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 3559 | bool skip = false; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3560 | auto pool_state = Get<DESCRIPTOR_POOL_STATE>(p_alloc_info->descriptorPool); |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 3561 | |
| 3562 | for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) { |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 3563 | auto layout = Get<cvdescriptorset::DescriptorSetLayout>(p_alloc_info->pSetLayouts[i]); |
John Zulauf | 5562d06 | 2018-01-24 11:54:05 -0700 | [diff] [blame] | 3564 | if (layout) { // nullptr layout indicates no valid layout handle for this device, validated/logged in object_tracker |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 3565 | if (layout->IsPushDescriptor()) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3566 | skip |= LogError(p_alloc_info->pSetLayouts[i], "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308", |
| 3567 | "%s specified at pSetLayouts[%" PRIu32 |
| 3568 | "] in vkAllocateDescriptorSets() was created with invalid flag %s set.", |
| 3569 | report_data->FormatHandle(p_alloc_info->pSetLayouts[i]).c_str(), i, |
| 3570 | "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR"); |
John Zulauf | 5562d06 | 2018-01-24 11:54:05 -0700 | [diff] [blame] | 3571 | } |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 3572 | if (layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT && |
| 3573 | !(pool_state->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT)) { |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3574 | skip |= LogError( |
| 3575 | device, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-03044", |
| 3576 | "vkAllocateDescriptorSets(): Descriptor set layout create flags and pool create flags mismatch for index (%d)", |
| 3577 | i); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 3578 | } |
ziga-lunarg | 2ab9653 | 2021-07-19 11:06:41 +0200 | [diff] [blame] | 3579 | if (layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE && |
| 3580 | !(pool_state->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE)) { |
| 3581 | skip |= LogError(device, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-04610", |
| 3582 | "vkAllocateDescriptorSets(): pSetLayouts[%d].flags contain " |
| 3583 | "VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE bit, but the pool was not created " |
| 3584 | "with the VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE bit.", |
| 3585 | i); |
| 3586 | } |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 3587 | } |
| 3588 | } |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 3589 | if (!IsExtEnabled(device_extensions.vk_khr_maintenance1)) { |
Mike Schuchardt | 64b5bb7 | 2017-03-21 16:33:26 -0600 | [diff] [blame] | 3590 | // Track number of descriptorSets allowable in this pool |
Jeremy Gebben | 0d9143e | 2022-01-01 12:29:36 -0700 | [diff] [blame] | 3591 | if (pool_state->GetAvailableSets() < p_alloc_info->descriptorSetCount) { |
Jeremy Gebben | 1fbebb8 | 2021-10-27 10:27:27 -0600 | [diff] [blame] | 3592 | skip |= LogError(pool_state->Handle(), "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3593 | "vkAllocateDescriptorSets(): Unable to allocate %u descriptorSets from %s" |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3594 | ". This pool only has %d descriptorSets remaining.", |
Jeremy Gebben | 1fbebb8 | 2021-10-27 10:27:27 -0600 | [diff] [blame] | 3595 | p_alloc_info->descriptorSetCount, report_data->FormatHandle(pool_state->Handle()).c_str(), |
Jeremy Gebben | 0d9143e | 2022-01-01 12:29:36 -0700 | [diff] [blame] | 3596 | pool_state->GetAvailableSets()); |
Mike Schuchardt | 64b5bb7 | 2017-03-21 16:33:26 -0600 | [diff] [blame] | 3597 | } |
| 3598 | // Determine whether descriptor counts are satisfiable |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 3599 | for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) { |
Jeremy Gebben | 0d9143e | 2022-01-01 12:29:36 -0700 | [diff] [blame] | 3600 | auto available_count = pool_state->GetAvailableCount(it->first); |
Jeff Bolz | 46c0ea0 | 2019-10-09 13:06:29 -0500 | [diff] [blame] | 3601 | |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 3602 | if (ds_data->required_descriptors_by_type.at(it->first) > available_count) { |
Jeremy Gebben | 1fbebb8 | 2021-10-27 10:27:27 -0600 | [diff] [blame] | 3603 | skip |= LogError(pool_state->Handle(), "VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3604 | "vkAllocateDescriptorSets(): Unable to allocate %u descriptors of type %s from %s" |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3605 | ". This pool only has %d descriptors of this type remaining.", |
| 3606 | ds_data->required_descriptors_by_type.at(it->first), |
| 3607 | string_VkDescriptorType(VkDescriptorType(it->first)), |
Jeremy Gebben | 1fbebb8 | 2021-10-27 10:27:27 -0600 | [diff] [blame] | 3608 | report_data->FormatHandle(pool_state->Handle()).c_str(), available_count); |
Mike Schuchardt | 64b5bb7 | 2017-03-21 16:33:26 -0600 | [diff] [blame] | 3609 | } |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 3610 | } |
| 3611 | } |
Tobin Ehlis | 5d749ea | 2016-07-18 13:14:01 -0600 | [diff] [blame] | 3612 | |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 3613 | const auto *count_allocate_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(p_alloc_info->pNext); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 3614 | |
| 3615 | if (count_allocate_info) { |
| 3616 | if (count_allocate_info->descriptorSetCount != 0 && |
| 3617 | count_allocate_info->descriptorSetCount != p_alloc_info->descriptorSetCount) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3618 | skip |= LogError(device, "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfo-descriptorSetCount-03045", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3619 | "vkAllocateDescriptorSets(): VkDescriptorSetAllocateInfo::descriptorSetCount (%d) != " |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 3620 | "VkDescriptorSetVariableDescriptorCountAllocateInfo::descriptorSetCount (%d)", |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3621 | p_alloc_info->descriptorSetCount, count_allocate_info->descriptorSetCount); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 3622 | } |
| 3623 | if (count_allocate_info->descriptorSetCount == p_alloc_info->descriptorSetCount) { |
| 3624 | for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3625 | auto layout = Get<cvdescriptorset::DescriptorSetLayout>(p_alloc_info->pSetLayouts[i]); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 3626 | if (count_allocate_info->pDescriptorCounts[i] > layout->GetDescriptorCountFromBinding(layout->GetMaxBinding())) { |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3627 | skip |= LogError(device, "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfo-pSetLayouts-03046", |
sfricke-samsung | bda4a85 | 2021-03-06 20:58:01 -0800 | [diff] [blame] | 3628 | "vkAllocateDescriptorSets(): pDescriptorCounts[%d] = (%d), binding's descriptorCount = (%d)", |
| 3629 | i, count_allocate_info->pDescriptorCounts[i], |
Mark Lobodzinski | d18de90 | 2020-01-15 12:20:37 -0700 | [diff] [blame] | 3630 | layout->GetDescriptorCountFromBinding(layout->GetMaxBinding())); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 3631 | } |
| 3632 | } |
| 3633 | } |
| 3634 | } |
| 3635 | |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 3636 | return skip; |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 3637 | } |
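// Illustrative sketch (not part of the validation logic): the checks above run against an application
// allocation such as the following (pool, layout, and count values are hypothetical):
//
//     uint32_t variable_count = 16;  // must not exceed the descriptorCount of the layout's highest binding
//     VkDescriptorSetVariableDescriptorCountAllocateInfo count_info = {};
//     count_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO;
//     count_info.descriptorSetCount = 1;  // must be 0 or equal to VkDescriptorSetAllocateInfo::descriptorSetCount
//     count_info.pDescriptorCounts = &variable_count;
//
//     VkDescriptorSetAllocateInfo alloc_info = {};
//     alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
//     alloc_info.pNext = &count_info;
//     alloc_info.descriptorPool = pool;      // must have enough sets/descriptors remaining when VK_KHR_maintenance1 is not enabled
//     alloc_info.descriptorSetCount = 1;
//     alloc_info.pSetLayouts = &set_layout;  // must not be a push-descriptor layout
//     VkDescriptorSet set = VK_NULL_HANDLE;
//     vkAllocateDescriptorSets(device, &alloc_info, &set);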
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 3638 | |
Jeff Bolz | dd4cfa1 | 2019-08-11 20:57:51 -0500 | [diff] [blame] | 3639 | const BindingReqMap &cvdescriptorset::PrefilterBindRequestMap::FilteredMap(const CMD_BUFFER_STATE &cb_state, |
| 3640 | const PIPELINE_STATE &pipeline) { |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 3641 | if (IsManyDescriptors()) { |
Karl Schultz | 7090a05 | 2020-11-10 08:54:21 -0700 | [diff] [blame] | 3642 | filtered_map_.reset(new BindingReqMap); |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 3643 | descriptor_set_.FilterBindingReqs(cb_state, pipeline, orig_map_, filtered_map_.get()); |
| 3644 | return *filtered_map_; |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 3645 | } |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 3646 | return orig_map_; |
Artem Kharytoniuk | 2456f99 | 2018-01-12 14:17:41 +0100 | [diff] [blame] | 3647 | } |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3648 | |
| 3649 | // Starting at the given binding's offset-th descriptor, walk over update_count |
| 3650 | // descriptor updates and verify that, for any binding boundaries that are crossed, the next binding(s) are all consistent. |
| 3651 | // Consistency means that their type, stage flags, and use (or not) of immutable samplers all match. |
| 3652 | // If so, return true. If not, fill in error_msg and return false. |
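//
// Worked example (hypothetical layout, for illustration only): suppose binding 0 has 4 descriptors and
// binding 1 has 6. An update of 8 descriptors starting at binding 0, array element 2 consumes elements 2..3
// of binding 0 and then spills into elements 0..5 of binding 1, so binding 1 must have the same type,
// stage flags, and immutable sampler usage as binding 0 for the update to be valid.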
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3653 | bool cvdescriptorset::VerifyUpdateConsistency(debug_report_data *report_data, |
| 3654 | DescriptorSetLayout::ConstBindingIterator current_binding, uint32_t offset, |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3655 | uint32_t update_count, const char *type, const VkDescriptorSet set, |
| 3656 | std::string *error_msg) { |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3657 | bool pass = true; |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3658 | // Verify consecutive bindings match (if needed) |
| 3659 | auto orig_binding = current_binding; |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3660 | |
| 3661 | while (pass && update_count) { |
| 3662 | // First, it's legal to offset beyond your own binding so handle that case |
| 3663 | if (offset > 0) { |
| 3664 | const auto &index_range = current_binding.GetGlobalIndexRange(); |
| 3665 | // index_range.start + offset is the descriptor being updated. If it is >= index_range.end, that descriptor |
| 3666 | // is not in this binding; it may be in the next binding. |
| 3667 | if ((index_range.start + offset) >= index_range.end) { |
| 3668 | // Advance to next binding, decrement offset by binding size |
| 3669 | offset -= current_binding.GetDescriptorCount(); |
| 3670 | ++current_binding; |
| 3671 | // Verify next consecutive binding matches type, stage flags & immutable sampler use and if AtEnd |
| 3672 | if (!orig_binding.IsConsistent(current_binding)) { |
| 3673 | pass = false; |
| 3674 | } |
| 3675 | continue; |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3676 | } |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3677 | } |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3678 | |
| 3679 | update_count -= std::min(update_count, current_binding.GetDescriptorCount() - offset); |
| 3680 | if (update_count) { |
| 3681 | // The update has more descriptors than remain in the current binding, so it spills into the next binding. |
| 3682 | // Check consistency, update counters and advance to the next binding. All bindings (even those skipped over) |
| 3683 | // must be consistent with the update and with the original binding. |
| 3684 | offset = 0; |
| 3685 | ++current_binding; |
| 3686 | // Verify next consecutive binding matches type, stage flags & immutable sampler use and if AtEnd |
| 3687 | if (!orig_binding.IsConsistent(current_binding)) { |
| 3688 | pass = false; |
| 3689 | } |
| 3690 | } |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3691 | } |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3692 | |
| 3693 | if (!pass) { |
| 3694 | std::stringstream error_str; |
| 3695 | error_str << "Attempting " << type; |
| 3696 | if (current_binding.Layout()->IsPushDescriptor()) { |
| 3697 | error_str << " push descriptors"; |
| 3698 | } else { |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3699 | error_str << " descriptor set " << report_data->FormatHandle(set); |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3700 | } |
| 3701 | error_str << " binding #" << orig_binding.Binding() << " with #" << update_count |
| 3702 | << " descriptors being updated but this update oversteps the bounds of this binding and the next binding is " |
sfricke-samsung | 5de3488 | 2021-04-15 22:33:23 -0700 | [diff] [blame] | 3703 | "not consistent with the current binding"; |
| 3704 | |
| 3705 | // Get what was not consistent in IsConsistent() as a more detailed error message |
| 3706 | const auto *binding_ci = orig_binding.GetDescriptorSetLayoutBindingPtr(); |
| 3707 | const auto *other_binding_ci = current_binding.GetDescriptorSetLayoutBindingPtr(); |
| 3708 | if (binding_ci == nullptr || other_binding_ci == nullptr) { |
| 3709 | error_str << " (No two valid DescriptorSetLayoutBinding to compare)"; |
| 3710 | } else if (binding_ci->descriptorType != other_binding_ci->descriptorType) { |
| 3711 | error_str << " (" << string_VkDescriptorType(binding_ci->descriptorType) |
| 3712 | << " != " << string_VkDescriptorType(other_binding_ci->descriptorType) << ")"; |
| 3713 | } else if (binding_ci->stageFlags != other_binding_ci->stageFlags) { |
| 3714 | error_str << " (" << string_VkShaderStageFlags(binding_ci->stageFlags) |
| 3715 | << " != " << string_VkShaderStageFlags(other_binding_ci->stageFlags) << ")"; |
| 3716 | } else if (!hash_util::similar_for_nullity(binding_ci->pImmutableSamplers, other_binding_ci->pImmutableSamplers)) { |
| 3717 | error_str << " (pImmutableSamplers don't match)"; |
| 3718 | } else if (orig_binding.GetDescriptorBindingFlags() != current_binding.GetDescriptorBindingFlags()) { |
| 3719 | error_str << " (" << string_VkDescriptorBindingFlags(orig_binding.GetDescriptorBindingFlags()) |
| 3720 | << " != " << string_VkDescriptorBindingFlags(current_binding.GetDescriptorBindingFlags()) << ")"; |
| 3721 | } |
| 3722 | |
| 3723 | error_str << " so this update is invalid"; |
locke-lunarg | e46b778 | 2019-09-10 01:44:20 -0600 | [diff] [blame] | 3724 | *error_msg = error_str.str(); |
| 3725 | } |
| 3726 | return pass; |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 3727 | } |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3728 | |
| 3729 | // Validate the state for a given write update but don't actually perform the update |
| 3730 | // If an error would occur for this update, return false and fill in details in error_msg string |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3731 | bool CoreChecks::ValidateWriteUpdate(const DescriptorSet *dest_set, const VkWriteDescriptorSet *update, const char *func_name, |
ziga-lunarg | 6c46b24 | 2021-09-13 18:33:37 +0200 | [diff] [blame] | 3732 | std::string *error_code, std::string *error_msg, bool push) const { |
Jeremy Gebben | 9d0dda3 | 2022-01-28 10:16:56 -0700 | [diff] [blame] | 3733 | const auto *dest_layout = dest_set->GetLayout().get(); |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3734 | |
| 3735 | // Verify dst layout still valid |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 3736 | if (dest_layout->Destroyed()) { |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3737 | *error_code = "VUID-VkWriteDescriptorSet-dstSet-00320"; |
Mark Lobodzinski | 23e395e | 2020-04-09 10:17:31 -0600 | [diff] [blame] | 3738 | std::ostringstream str; |
| 3739 | str << "Cannot call " << func_name << " to perform write update on " << dest_set->StringifySetAndLayout() |
| 3740 | << " which has been destroyed"; |
| 3741 | *error_msg = str.str(); |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3742 | return false; |
| 3743 | } |
| 3744 | // Verify dst binding exists |
| 3745 | if (!dest_layout->HasBinding(update->dstBinding)) { |
| 3746 | *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00315"; |
| 3747 | std::stringstream error_str; |
| 3748 | error_str << dest_set->StringifySetAndLayout() << " does not have binding " << update->dstBinding; |
| 3749 | *error_msg = error_str.str(); |
| 3750 | return false; |
| 3751 | } |
| 3752 | |
Jeff Bolz | 6aad174 | 2019-10-16 11:10:09 -0500 | [diff] [blame] | 3753 | DescriptorSetLayout::ConstBindingIterator dest(dest_layout, update->dstBinding); |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3754 | // Make sure binding isn't empty |
| 3755 | if (0 == dest.GetDescriptorCount()) { |
| 3756 | *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00316"; |
| 3757 | std::stringstream error_str; |
| 3758 | error_str << dest_set->StringifySetAndLayout() << " cannot update binding " << update->dstBinding |
| 3759 | << " that has 0 descriptors"; |
| 3760 | *error_msg = error_str.str(); |
| 3761 | return false; |
| 3762 | } |
| 3763 | |
| 3764 | // Verify idle ds |
Jeremy Gebben | 9efe1cf | 2021-05-15 20:05:09 -0600 | [diff] [blame] | 3765 | if (dest_set->InUse() && !(dest.GetDescriptorBindingFlags() & (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT | |
Mike Schuchardt | 2df0891 | 2020-12-15 16:28:09 -0800 | [diff] [blame] | 3766 | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT))) { |
aitor-lunarg | 9f85900 | 2022-01-27 19:33:35 +0100 | [diff] [blame] | 3767 | *error_code = "VUID-vkUpdateDescriptorSets-None-03047"; |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3768 | std::stringstream error_str; |
| 3769 | error_str << "Cannot call " << func_name << " to perform write update on " << dest_set->StringifySetAndLayout() |
| 3770 | << " that is in use by a command buffer"; |
| 3771 | *error_msg = error_str.str(); |
| 3772 | return false; |
| 3773 | } |
| 3774 | // We know that the binding is valid, so verify the update against each descriptor (no update is performed here) |
| 3775 | auto start_idx = dest.GetGlobalIndexRange().start + update->dstArrayElement; |
| 3776 | auto type = dest.GetType(); |
Tony-LunarG | f563b36 | 2021-03-18 16:13:18 -0600 | [diff] [blame] | 3777 | if ((type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) && (type != update->descriptorType)) { |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3778 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00319"; |
| 3779 | std::stringstream error_str; |
| 3780 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 3781 | << " with type " << string_VkDescriptorType(type) << " but update type is " |
| 3782 | << string_VkDescriptorType(update->descriptorType); |
| 3783 | *error_msg = error_str.str(); |
| 3784 | return false; |
| 3785 | } |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3786 | if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) { |
| 3787 | if ((update->dstArrayElement % 4) != 0) { |
| 3788 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02219"; |
| 3789 | std::stringstream error_str; |
| 3790 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 3791 | << " with " |
| 3792 | << "dstArrayElement " << update->dstArrayElement << " not a multiple of 4"; |
| 3793 | *error_msg = error_str.str(); |
| 3794 | return false; |
| 3795 | } |
| 3796 | if ((update->descriptorCount % 4) != 0) { |
| 3797 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02220"; |
| 3798 | std::stringstream error_str; |
| 3799 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 3800 | << " with " |
| 3801 | << "descriptorCount " << update->descriptorCount << " not a multiple of 4"; |
| 3802 | *error_msg = error_str.str(); |
| 3803 | return false; |
| 3804 | } |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 3805 | const auto *write_inline_info = LvlFindInChain<VkWriteDescriptorSetInlineUniformBlockEXT>(update->pNext); |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3806 | if (!write_inline_info || write_inline_info->dataSize != update->descriptorCount) { |
| 3807 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02221"; |
| 3808 | std::stringstream error_str; |
| 3809 | if (!write_inline_info) { |
| 3810 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" |
| 3811 | << update->dstBinding << " with " |
Tony-LunarG | 836ea5b | 2021-12-07 10:07:17 -0700 | [diff] [blame] | 3812 | << "VkWriteDescriptorSetInlineUniformBlock missing"; |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3813 | } else { |
| 3814 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" |
| 3815 | << update->dstBinding << " with " |
Tony-LunarG | 836ea5b | 2021-12-07 10:07:17 -0700 | [diff] [blame] | 3816 | << "VkWriteDescriptorSetInlineUniformBlock dataSize " << write_inline_info->dataSize |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3817 | << " not equal to " |
| 3818 | << "VkWriteDescriptorSet descriptorCount " << update->descriptorCount; |
| 3819 | } |
| 3820 | *error_msg = error_str.str(); |
| 3821 | return false; |
| 3822 | } |
| 3823 | // This error is probably unreachable due to the previous two errors |
| 3824 | if (write_inline_info && (write_inline_info->dataSize % 4) != 0) { |
Tony-LunarG | 836ea5b | 2021-12-07 10:07:17 -0700 | [diff] [blame] | 3825 | *error_code = "VUID-VkWriteDescriptorSetInlineUniformBlock-dataSize-02222"; |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3826 | std::stringstream error_str; |
| 3827 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 3828 | << " with " |
Tony-LunarG | 836ea5b | 2021-12-07 10:07:17 -0700 | [diff] [blame] | 3829 | << "VkWriteDescriptorSetInlineUniformBlock dataSize " << write_inline_info->dataSize |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3830 | << " not a multiple of 4"; |
| 3831 | *error_msg = error_str.str(); |
| 3832 | return false; |
| 3833 | } |
| 3834 | } |
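// Illustrative sketch (not part of the validation logic): a write that satisfies the inline uniform block
// rules checked above looks like the following (set/binding values are hypothetical):
//
//     uint8_t data[8] = {};
//     VkWriteDescriptorSetInlineUniformBlockEXT inline_info = {};
//     inline_info.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
//     inline_info.dataSize = sizeof(data);  // multiple of 4 and equal to descriptorCount
//     inline_info.pData = data;
//
//     VkWriteDescriptorSet write = {};
//     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
//     write.pNext = &inline_info;
//     write.dstSet = set;
//     write.dstBinding = 0;
//     write.dstArrayElement = 4;   // byte offset into the block, must be a multiple of 4
//     write.descriptorCount = 8;   // byte count, must be a multiple of 4
//     write.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;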
sfricke-samsung | 941d48b | 2020-02-10 00:20:01 -0800 | [diff] [blame] | 3835 | // Verify that all bindings touched by the update share identical properties across all items |
| 3836 | if (update->descriptorCount > 0) { |
| 3837 | // Save first binding information and error if something different is found |
| 3838 | DescriptorSetLayout::ConstBindingIterator current_binding(dest_layout, update->dstBinding); |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 3839 | VkShaderStageFlags stage_flags = current_binding.GetStageFlags(); |
| 3840 | VkDescriptorType descriptor_type = current_binding.GetType(); |
| 3841 | bool immutable_samplers = (current_binding.GetImmutableSamplerPtr() == nullptr); |
| 3842 | uint32_t dst_array_element = update->dstArrayElement; |
sfricke-samsung | 941d48b | 2020-02-10 00:20:01 -0800 | [diff] [blame] | 3843 | |
Jeff Bolz | 9198e88 | 2020-03-18 13:03:30 -0500 | [diff] [blame] | 3844 | for (uint32_t i = 0; i < update->descriptorCount;) { |
sfricke-samsung | 941d48b | 2020-02-10 00:20:01 -0800 | [diff] [blame] | 3845 | if (current_binding.AtEnd() == true) { |
| 3846 | break; // prevents setting error here if bindings don't exist |
| 3847 | } |
| 3848 | |
Quentin Huot-Marchand | 98d84dd | 2021-06-24 09:54:58 +0200 | [diff] [blame] | 3849 | // All consecutive bindings updated, except those with a descriptorCount of zero, must have identical descriptorType and stageFlags |
| 3850 | if (current_binding.GetDescriptorCount() > 0) { |
| 3851 | // Check for consistent stageFlags and descriptorType |
| 3852 | if ((current_binding.GetStageFlags() != stage_flags) || (current_binding.GetType() != descriptor_type)) { |
| 3853 | *error_code = "VUID-VkWriteDescriptorSet-descriptorCount-00317"; |
| 3854 | std::stringstream error_str; |
| 3855 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding index #" |
| 3856 | << current_binding.GetIndex() << " (" << i << " from dstBinding offset)" |
| 3857 | << " with a different stageFlag and/or descriptorType from previous bindings." |
| 3858 | << " All bindings must have consecutive stageFlag and/or descriptorType across a VkWriteDescriptorSet"; |
| 3859 | *error_msg = error_str.str(); |
| 3860 | return false; |
| 3861 | } |
| 3862 | // Check if all immutableSamplers or not |
| 3863 | if ((current_binding.GetImmutableSamplerPtr() == nullptr) != immutable_samplers) { |
| 3864 | *error_code = "VUID-VkWriteDescriptorSet-descriptorCount-00318"; |
| 3865 | std::stringstream error_str; |
| 3866 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding index #" |
| 3867 | << current_binding.GetIndex() << " (" << i << " from dstBinding offset)" |
| 3868 | << " with a different usage of immutable samplers from previous bindings." |
| 3869 | << " All bindings must have all or none usage of immutable samplers across a VkWriteDescriptorSet"; |
| 3870 | *error_msg = error_str.str(); |
| 3871 | return false; |
| 3872 | } |
sfricke-samsung | 941d48b | 2020-02-10 00:20:01 -0800 | [diff] [blame] | 3873 | } |
Jeff Bolz | 9198e88 | 2020-03-18 13:03:30 -0500 | [diff] [blame] | 3874 | |
| 3875 | // Skip the remaining descriptors for this binding, and move to the next binding |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 3876 | i += (current_binding.GetDescriptorCount() - dst_array_element); |
| 3877 | dst_array_element = 0; |
sfricke-samsung | 941d48b | 2020-02-10 00:20:01 -0800 | [diff] [blame] | 3878 | ++current_binding; |
| 3879 | } |
| 3880 | } |
| 3881 | |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3882 | // Verify consecutive bindings match (if needed) |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 3883 | if (!VerifyUpdateConsistency(report_data, DescriptorSetLayout::ConstBindingIterator(dest_layout, update->dstBinding), |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3884 | update->dstArrayElement, update->descriptorCount, "write update to", dest_set->GetSet(), |
| 3885 | error_msg)) { |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3886 | *error_code = "VUID-VkWriteDescriptorSet-dstArrayElement-00321"; |
| 3887 | return false; |
| 3888 | } |
Tony-LunarG | 1f79c95 | 2020-10-27 15:55:51 -0600 | [diff] [blame] | 3889 | // Verify write to variable descriptor count binding |
| 3890 | if (dest_set->IsVariableDescriptorCount(update->dstBinding)) { |
| 3891 | if ((update->dstArrayElement + update->descriptorCount) > dest_set->GetVariableDescriptorCount()) { |
| 3892 | std::stringstream error_str; |
| 3893 | *error_code = "VUID-VkWriteDescriptorSet-dstArrayElement-00321"; |
| 3894 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding index #" |
| 3895 | << update->dstBinding << " array element " << update->dstArrayElement << " with " << update->descriptorCount |
| 3896 | << " writes but variable descriptor size is " << dest_set->GetVariableDescriptorCount(); |
| 3897 | *error_msg = error_str.str(); |
| 3898 | return false; |
| 3899 | } |
| 3900 | } |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3901 | // Update is within bounds and consistent so last step is to validate update contents |
ziga-lunarg | 6c46b24 | 2021-09-13 18:33:37 +0200 | [diff] [blame] | 3902 | if (!VerifyWriteUpdateContents(dest_set, update, start_idx, func_name, error_code, error_msg, push)) { |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3903 | std::stringstream error_str; |
| 3904 | error_str << "Write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 3905 | << " failed with error message: " << error_msg->c_str(); |
| 3906 | *error_msg = error_str.str(); |
| 3907 | return false; |
| 3908 | } |
ziga-lunarg | d67b5f5 | 2021-10-16 23:52:59 +0200 | [diff] [blame] | 3909 | const auto orig_binding = DescriptorSetLayout::ConstBindingIterator(dest_set->GetLayout().get(), update->dstBinding); |
| 3910 | if (!orig_binding.AtEnd() && orig_binding.GetType() == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) { |
| 3911 | // Check if the new descriptor type is in the list of allowed mutable types for this binding |
| 3912 | if (!orig_binding.Layout()->IsTypeMutable(update->descriptorType, update->dstBinding)) { |
| 3913 | *error_code = "VUID-VkWriteDescriptorSet-dstSet-04611"; |
| 3914 | std::stringstream error_str; |
| 3915 | error_str << "Write update type is " << string_VkDescriptorType(update->descriptorType) |
| 3916 | << ", but descriptor set layout binding was created with type VK_DESCRIPTOR_TYPE_MUTABLE_VALVE and used type " |
| 3917 | "is not in VkMutableDescriptorTypeListVALVE::pDescriptorTypes for this binding."; |
| 3918 | *error_msg = error_str.str(); |
| 3919 | return false; |
| 3920 | } |
| 3921 | } |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 3922 | // All checks passed, update is clean |
| 3923 | return true; |
| 3924 | } |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3925 | |
| 3926 | // Verify that the contents of the update are ok, but don't perform actual update |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3927 | bool CoreChecks::VerifyWriteUpdateContents(const DescriptorSet *dest_set, const VkWriteDescriptorSet *update, const uint32_t index, |
ziga-lunarg | 6c46b24 | 2021-09-13 18:33:37 +0200 | [diff] [blame] | 3928 | const char *func_name, std::string *error_code, std::string *error_msg, |
| 3929 | bool push) const { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3930 | using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor; |
Nathaniel Cesario | ff52151 | 2020-12-11 16:00:26 -0700 | [diff] [blame] | 3931 | using Descriptor = cvdescriptorset::Descriptor; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 3932 | |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3933 | switch (update->descriptorType) { |
| 3934 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: { |
| 3935 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 3936 | // Validate image |
| 3937 | auto image_view = update->pImageInfo[di].imageView; |
| 3938 | auto image_layout = update->pImageInfo[di].imageLayout; |
Mark Lobodzinski | 3ca937b | 2020-02-14 14:56:06 -0700 | [diff] [blame] | 3939 | auto sampler = update->pImageInfo[di].sampler; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3940 | auto iv_state = Get<IMAGE_VIEW_STATE>(image_view); |
Nathaniel Cesario | ff52151 | 2020-12-11 16:00:26 -0700 | [diff] [blame] | 3941 | const ImageSamplerDescriptor *desc = |
| 3942 | (const ImageSamplerDescriptor *)dest_set->GetDescriptorFromGlobalIndex(index + di); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3943 | if (image_view) { |
Jeremy Gebben | 9d0dda3 | 2022-01-28 10:16:56 -0700 | [diff] [blame] | 3944 | const auto *image_state = iv_state->image_state.get(); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3945 | if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, func_name, error_code, error_msg)) { |
| 3946 | std::stringstream error_str; |
| 3947 | error_str << "Attempted write update to combined image sampler descriptor failed due to: " |
| 3948 | << error_msg->c_str(); |
| 3949 | *error_msg = error_str.str(); |
| 3950 | return false; |
| 3951 | } |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 3952 | if (IsExtEnabled(device_extensions.vk_khr_sampler_ycbcr_conversion)) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3953 | if (desc->IsImmutableSampler()) { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 3954 | auto sampler_state = Get<SAMPLER_STATE>(desc->GetSampler()); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3955 | if (iv_state && sampler_state) { |
| 3956 | if (iv_state->samplerConversion != sampler_state->samplerConversion) { |
| 3957 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-01948"; |
| 3958 | std::stringstream error_str; |
| 3959 | error_str |
| 3960 | << "Attempted write update to combined image sampler and image view and sampler ycbcr " |
| 3961 | "conversions are not identical, sampler: " |
| 3962 | << report_data->FormatHandle(desc->GetSampler()) |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3963 | << " image view: " << report_data->FormatHandle(iv_state->image_view()) << "."; |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3964 | *error_msg = error_str.str(); |
| 3965 | return false; |
| 3966 | } |
| 3967 | } |
| 3968 | } else { |
| 3969 | if (iv_state && (iv_state->samplerConversion != VK_NULL_HANDLE)) { |
| 3970 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02738"; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3971 | std::stringstream error_str; |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3972 | error_str << "Because dstSet (" << report_data->FormatHandle(update->dstSet) |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3973 | << ") is bound to image view (" << report_data->FormatHandle(iv_state->image_view()) |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3974 | << ") that includes a YCBCR conversion, it must have been allocated with a layout that " |
| 3975 | "includes an immutable sampler."; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3976 | *error_msg = error_str.str(); |
| 3977 | return false; |
| 3978 | } |
| 3979 | } |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3980 | } |
John Bauman | da8abff | 2020-10-19 21:25:21 +0000 | [diff] [blame] | 3981 | // If there is an immutable sampler then |sampler| isn't used, so the following VU does not apply. |
| 3982 | if (sampler && !desc->IsImmutableSampler() && FormatIsMultiplane(image_state->createInfo.format)) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3983 | // multiplane formats must be created with mutable format bit |
| 3984 | if (0 == (image_state->createInfo.flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT)) { |
| 3985 | *error_code = "VUID-VkDescriptorImageInfo-sampler-01564"; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3986 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 3987 | error_str << "image " << report_data->FormatHandle(image_state->image()) |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3988 | << " combined image sampler is a multi-planar " |
| 3989 | << "format and was not was not created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT"; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 3990 | *error_msg = error_str.str(); |
| 3991 | return false; |
| 3992 | } |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 3993 | // the image view's aspectMask must only include the plane aspects supported by the format |
| 3994 | VkImageAspectFlags legal_aspect_flags = (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT); |
| 3995 | legal_aspect_flags |= |
| 3996 | (FormatPlaneCount(image_state->createInfo.format) == 3) ? VK_IMAGE_ASPECT_PLANE_2_BIT : 0; |
| 3997 | if (0 != (iv_state->create_info.subresourceRange.aspectMask & (~legal_aspect_flags))) { |
| 3998 | *error_code = "VUID-VkDescriptorImageInfo-sampler-01564"; |
| 3999 | std::stringstream error_str; |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 4000 | error_str << "image " << report_data->FormatHandle(image_state->image()) |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 4001 | << " combined image sampler is a multi-planar " |
Jeremy Gebben | 14b0d1a | 2021-05-15 20:15:41 -0600 | [diff] [blame] | 4002 | << "format and " << report_data->FormatHandle(iv_state->image_view()) |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 4003 | << " aspectMask must only include " << string_VkImageAspectFlags(legal_aspect_flags); |
| 4004 | *error_msg = error_str.str(); |
| 4005 | return false; |
| 4006 | } |
sfricke-samsung | 27e5d5a | 2020-01-07 21:07:08 -0800 | [diff] [blame] | 4007 | } |
Nathaniel Cesario | 23afadd | 2020-11-17 12:51:45 -0700 | [diff] [blame] | 4008 | |
| 4009 | // Verify portability |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 4010 | auto sampler_state = Get<SAMPLER_STATE>(sampler); |
Nathaniel Cesario | 23afadd | 2020-11-17 12:51:45 -0700 | [diff] [blame] | 4011 | if (sampler_state) { |
sfricke-samsung | 45996a4 | 2021-09-16 13:45:27 -0700 | [diff] [blame] | 4012 | if (IsExtEnabled(device_extensions.vk_khr_portability_subset)) { |
Nathaniel Cesario | 23afadd | 2020-11-17 12:51:45 -0700 | [diff] [blame] | 4013 | if ((VK_FALSE == enabled_features.portability_subset_features.mutableComparisonSamplers) && |
| 4014 | (VK_FALSE != sampler_state->createInfo.compareEnable)) { |
| 4015 | LogError(device, "VUID-VkDescriptorImageInfo-mutableComparisonSamplers-04450", |
| 4016 | "%s (portability error): sampler comparison not available.", func_name); |
| 4017 | } |
| 4018 | } |
| 4019 | } |
sfricke-samsung | 27e5d5a | 2020-01-07 21:07:08 -0800 | [diff] [blame] | 4020 | } |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 4021 | } |
| 4022 | } |
Mark Lobodzinski | ac72777 | 2020-01-08 10:47:30 -0700 | [diff] [blame] | 4023 | // Fall through |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 4024 | case VK_DESCRIPTOR_TYPE_SAMPLER: { |
| 4025 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Nathaniel Cesario | ce9b481 | 2020-12-17 08:55:28 -0700 | [diff] [blame] | 4026 | const auto *desc = static_cast<const Descriptor *>(dest_set->GetDescriptorFromGlobalIndex(index + di)); |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 4027 | if (!desc->IsImmutableSampler()) { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 4028 | if (!ValidateSampler(update->pImageInfo[di].sampler)) { |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 4029 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325"; |
| 4030 | std::stringstream error_str; |
| 4031 | error_str << "Attempted write update to sampler descriptor with invalid sampler: " |
Mark Lobodzinski | db35b8b | 2020-04-09 08:46:59 -0600 | [diff] [blame] | 4032 | << report_data->FormatHandle(update->pImageInfo[di].sampler) << "."; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 4033 | *error_msg = error_str.str(); |
| 4034 | return false; |
| 4035 | } |
ziga-lunarg | 6c46b24 | 2021-09-13 18:33:37 +0200 | [diff] [blame] | 4036 | } else if (update->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER && !push) { |
Mark Lobodzinski | f4ed6c1 | 2020-01-03 11:21:58 -0700 | [diff] [blame] | 4037 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02752"; |
| 4038 | std::stringstream error_str; |
| 4039 | error_str << "Attempted write update to an immutable sampler descriptor."; |
| 4040 | *error_msg = error_str.str(); |
| 4041 | return false; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 4042 | } |
| 4043 | } |
| 4044 | break; |
| 4045 | } |
| 4046 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
| 4047 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: |
| 4048 | case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: { |
| 4049 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 4050 | auto image_view = update->pImageInfo[di].imageView; |
| 4051 | auto image_layout = update->pImageInfo[di].imageLayout; |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 4052 | if (image_view) { |
| 4053 | if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, func_name, error_code, error_msg)) { |
| 4054 | std::stringstream error_str; |
| 4055 | error_str << "Attempted write update to image descriptor failed due to: " << error_msg->c_str(); |
| 4056 | *error_msg = error_str.str(); |
| 4057 | return false; |
| 4058 | } |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 4059 | } |
| 4060 | } |
| 4061 | break; |
| 4062 | } |
| 4063 | case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: |
| 4064 | case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: { |
| 4065 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 4066 | auto buffer_view = update->pTexelBufferView[di]; |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 4067 | if (buffer_view) { |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 4068 | auto bv_state = Get<BUFFER_VIEW_STATE>(buffer_view); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 4069 | if (!bv_state) { |
| 4070 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02994"; |
| 4071 | std::stringstream error_str; |
| 4072 | error_str << "Attempted write update to texel buffer descriptor with invalid buffer view: " |
| 4073 | << report_data->FormatHandle(buffer_view); |
| 4074 | *error_msg = error_str.str(); |
| 4075 | return false; |
| 4076 | } |
| 4077 | auto buffer = bv_state->create_info.buffer; |
Jeremy Gebben | b20a824 | 2021-11-05 15:14:43 -0600 | [diff] [blame] | 4078 | auto buffer_state = Get<BUFFER_STATE>(buffer); |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 4079 | // Verify that buffer underlying the view hasn't been destroyed prematurely |
| 4080 | if (!buffer_state) { |
| 4081 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02994"; |
| 4082 | std::stringstream error_str; |
| 4083 | error_str << "Attempted write update to texel buffer descriptor failed because underlying buffer (" |
| 4084 | << report_data->FormatHandle(buffer) << ") has been destroyed."; |
| 4085 | *error_msg = error_str.str(); |
| 4086 | return false; |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 4087 | } else if (!cvdescriptorset::ValidateBufferUsage(report_data, buffer_state.get(), update->descriptorType, |
| 4088 | error_code, error_msg)) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 4089 | std::stringstream error_str; |
| 4090 | error_str << "Attempted write update to texel buffer descriptor failed due to: " << error_msg->c_str(); |
| 4091 | *error_msg = error_str.str(); |
| 4092 | return false; |
| 4093 | } |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 4094 | } |
| 4095 | } |
| 4096 | break; |
| 4097 | } |
| 4098 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: |
| 4099 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: |
| 4100 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: |
| 4101 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: { |
| 4102 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Jeff Bolz | 165818a | 2020-05-08 11:19:03 -0500 | [diff] [blame] | 4103 | if (update->pBufferInfo[di].buffer) { |
| 4104 | if (!ValidateBufferUpdate(update->pBufferInfo + di, update->descriptorType, func_name, error_code, error_msg)) { |
| 4105 | std::stringstream error_str; |
| 4106 | error_str << "Attempted write update to buffer descriptor failed due to: " << error_msg->c_str(); |
| 4107 | *error_msg = error_str.str(); |
| 4108 | return false; |
| 4109 | } |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 4110 | } |
| 4111 | } |
| 4112 | break; |
| 4113 | } |
| 4114 | case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: |
| 4115 | break; |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 4116 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV: { |
Mark Lobodzinski | 1f887d3 | 2020-12-30 15:31:33 -0700 | [diff] [blame] | 4117 | const auto *acc_info = LvlFindInChain<VkWriteDescriptorSetAccelerationStructureNV>(update->pNext); |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 4118 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Jeremy Gebben | 9f53710 | 2021-10-05 16:37:12 -0600 | [diff] [blame] | 4119 | auto as_state = Get<ACCELERATION_STRUCTURE_STATE>(acc_info->pAccelerationStructures[di]); |
| 4120 | if (!ValidateAccelerationStructureUpdate(as_state.get(), func_name, error_code, error_msg)) { |
Jeff Bolz | 95176d0 | 2020-04-01 00:36:16 -0500 | [diff] [blame] | 4121 | std::stringstream error_str; |
| 4122 | error_str << "Attempted write update to acceleration structure descriptor failed due to: " |
| 4123 | << error_msg->c_str(); |
| 4124 | *error_msg = error_str.str(); |
| 4125 | return false; |
| 4126 | } |
| 4127 | } |
| 4128 | |
| 4129 | } break; |
sourav parmar | cd5fb18 | 2020-07-17 12:58:44 -0700 | [diff] [blame] | 4130 | // KHR acceleration structures don't require memory to be bound manually to them. |
| 4131 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: |
| 4132 | break; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 4133 | default: |
| 4134 | assert(0); // We've already verified update type so should never get here |
| 4135 | break; |
| 4136 | } |
| 4137 | // All checks passed so update contents are good |
| 4138 | return true; |
| 4139 | } |
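// Illustrative sketch (not part of the validation logic): a combined image sampler write of the kind validated
// above (handles are hypothetical; for multi-planar formats the image must additionally be created with
// VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT and the view's aspectMask restricted to the plane aspects):
//
//     VkDescriptorImageInfo image_info = {};
//     image_info.sampler = sampler;  // ignored if the destination binding uses immutable samplers
//     image_info.imageView = image_view;
//     image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
//
//     VkWriteDescriptorSet write = {};
//     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
//     write.dstSet = set;
//     write.dstBinding = 0;
//     write.dstArrayElement = 0;
//     write.descriptorCount = 1;
//     write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
//     write.pImageInfo = &image_info;
//     vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);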