/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 *         John Zulauf <jzulauf@lunarg.com>
 */

// Allow use of STL min and max functions in Windows
#define NOMINMAX

#include "chassis.h"
#include "core_validation_error_enums.h"
#include "core_validation.h"
#include "descriptor_sets.h"
#include "hash_vk_types.h"
#include "vk_enum_string_helper.h"
#include "vk_safe_struct.h"
#include "vk_typemap_helper.h"
#include "buffer_validation.h"
#include <sstream>
#include <algorithm>
#include <array>
#include <memory>

// ExtendedBinding collects a VkDescriptorSetLayoutBinding and any extended
// state that comes from a different array/structure so they can stay together
// while being sorted by binding number.
struct ExtendedBinding {
    ExtendedBinding(const VkDescriptorSetLayoutBinding *l, VkDescriptorBindingFlagsEXT f) : layout_binding(l), binding_flags(f) {}

    const VkDescriptorSetLayoutBinding *layout_binding;
    VkDescriptorBindingFlagsEXT binding_flags;
};

struct BindingNumCmp {
    bool operator()(const ExtendedBinding &a, const ExtendedBinding &b) const {
        return a.layout_binding->binding < b.layout_binding->binding;
    }
};
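// ExtendedBinding instances are collected into a std::set keyed by BindingNumCmp, so the constructor
// below walks the bindings in increasing binding-number order and silently drops duplicate binding
// numbers (the duplication itself is reported separately by ValidateDescriptorSetLayoutCreateInfo).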

using DescriptorSet = cvdescriptorset::DescriptorSet;
using DescriptorSetLayout = cvdescriptorset::DescriptorSetLayout;
using DescriptorSetLayoutDef = cvdescriptorset::DescriptorSetLayoutDef;
using DescriptorSetLayoutId = cvdescriptorset::DescriptorSetLayoutId;

// Canonical dictionary of DescriptorSetLayoutDef (without any handle/device specific information)
cvdescriptorset::DescriptorSetLayoutDict descriptor_set_layout_dict;

DescriptorSetLayoutId GetCanonicalId(const VkDescriptorSetLayoutCreateInfo *p_create_info) {
    return descriptor_set_layout_dict.look_up(DescriptorSetLayoutDef(p_create_info));
}
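// Because look_up() returns the same DescriptorSetLayoutId for equivalent definitions, layouts created
// from matching create infos share a single canonical DescriptorSetLayoutDef.
// Illustrative sketch (hypothetical create infos, for exposition only):
//     auto id_a = GetCanonicalId(&create_info_a);
//     auto id_b = GetCanonicalId(&create_info_b);  // same bindings and flags as create_info_a
//     assert(id_a == id_b);                        // both reference one canonical definition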

// Construct DescriptorSetLayout instance from given create info
// Proactively reserve and resize as possible, as the reallocation was visible in profiling
cvdescriptorset::DescriptorSetLayoutDef::DescriptorSetLayoutDef(const VkDescriptorSetLayoutCreateInfo *p_create_info)
    : flags_(p_create_info->flags), binding_count_(0), descriptor_count_(0), dynamic_descriptor_count_(0) {
    const auto *flags_create_info = lvl_find_in_chain<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>(p_create_info->pNext);

    binding_type_stats_ = {0, 0, 0};
    std::set<ExtendedBinding, BindingNumCmp> sorted_bindings;
    const uint32_t input_bindings_count = p_create_info->bindingCount;
    // Sort the input bindings in binding number order, eliminating duplicates
    for (uint32_t i = 0; i < input_bindings_count; i++) {
        VkDescriptorBindingFlagsEXT flags = 0;
        if (flags_create_info && flags_create_info->bindingCount == p_create_info->bindingCount) {
            flags = flags_create_info->pBindingFlags[i];
        }
        sorted_bindings.insert(ExtendedBinding(p_create_info->pBindings + i, flags));
    }

    // Store the create info in the sorted order from above
    std::map<uint32_t, uint32_t> binding_to_dyn_count;
    uint32_t index = 0;
    binding_count_ = static_cast<uint32_t>(sorted_bindings.size());
    bindings_.reserve(binding_count_);
    binding_flags_.reserve(binding_count_);
    binding_to_index_map_.reserve(binding_count_);
    for (auto input_binding : sorted_bindings) {
        // Add to binding and map, s.t. it is robust to invalid duplication of binding_num
        const auto binding_num = input_binding.layout_binding->binding;
        binding_to_index_map_[binding_num] = index++;
        bindings_.emplace_back(input_binding.layout_binding);
        auto &binding_info = bindings_.back();
        binding_flags_.emplace_back(input_binding.binding_flags);

        descriptor_count_ += binding_info.descriptorCount;
        if (binding_info.descriptorCount > 0) {
            non_empty_bindings_.insert(binding_num);
        }

        if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
            binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
            binding_to_dyn_count[binding_num] = binding_info.descriptorCount;
            dynamic_descriptor_count_ += binding_info.descriptorCount;
            binding_type_stats_.dynamic_buffer_count++;
        } else if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
                   (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)) {
            binding_type_stats_.non_dynamic_buffer_count++;
        } else {
            binding_type_stats_.image_sampler_count++;
        }
    }
    assert(bindings_.size() == binding_count_);
    assert(binding_flags_.size() == binding_count_);
    uint32_t global_index = 0;
    global_index_range_.reserve(binding_count_);
    // Vector order is finalized so build vectors of descriptors and dynamic offsets by binding index
    for (uint32_t i = 0; i < binding_count_; ++i) {
        auto final_index = global_index + bindings_[i].descriptorCount;
        global_index_range_.emplace_back(global_index, final_index);
        global_index = final_index;
    }

    // Now create dyn offset array mapping for any dynamic descriptors
    uint32_t dyn_array_idx = 0;
    binding_to_dynamic_array_idx_map_.reserve(binding_to_dyn_count.size());
    for (const auto &bc_pair : binding_to_dyn_count) {
        binding_to_dynamic_array_idx_map_[bc_pair.first] = dyn_array_idx;
        dyn_array_idx += bc_pair.second;
    }
}

size_t cvdescriptorset::DescriptorSetLayoutDef::hash() const {
    hash_util::HashCombiner hc;
    hc << flags_;
    hc.Combine(bindings_);
    hc.Combine(binding_flags_);
    return hc.Value();
}

// Return a valid index or "end", i.e. binding_count_;
// The asserts in "Get" are reduced to the set where no valid answer (like null or 0) could be given
// Common code for all binding lookups.
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetIndexFromBinding(uint32_t binding) const {
    const auto &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.cend()) return bi_itr->second;
    return GetBindingCount();
}
VkDescriptorSetLayoutBinding const *cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorSetLayoutBindingPtrFromIndex(
    const uint32_t index) const {
    if (index >= bindings_.size()) return nullptr;
    return bindings_[index].ptr();
}
// Return descriptorCount for given index, 0 if index is unavailable
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorCountFromIndex(const uint32_t index) const {
    if (index >= bindings_.size()) return 0;
    return bindings_[index].descriptorCount;
}
// For the given index, return descriptorType
VkDescriptorType cvdescriptorset::DescriptorSetLayoutDef::GetTypeFromIndex(const uint32_t index) const {
    assert(index < bindings_.size());
    if (index < bindings_.size()) return bindings_[index].descriptorType;
    return VK_DESCRIPTOR_TYPE_MAX_ENUM;
}
// For the given index, return stageFlags
VkShaderStageFlags cvdescriptorset::DescriptorSetLayoutDef::GetStageFlagsFromIndex(const uint32_t index) const {
    assert(index < bindings_.size());
    if (index < bindings_.size()) return bindings_[index].stageFlags;
    return VkShaderStageFlags(0);
}
// Return binding flags for given index, 0 if index is unavailable
VkDescriptorBindingFlagsEXT cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorBindingFlagsFromIndex(
    const uint32_t index) const {
    if (index >= binding_flags_.size()) return 0;
    return binding_flags_[index];
}

const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromIndex(uint32_t index) const {
    const static IndexRange kInvalidRange = {0xFFFFFFFF, 0xFFFFFFFF};
    if (index >= binding_flags_.size()) return kInvalidRange;
    return global_index_range_[index];
}

// For the given binding, return the global index range (half open)
// As start and end are often needed in pairs, get both with a single lookup.
const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromBinding(
    const uint32_t binding) const {
    uint32_t index = GetIndexFromBinding(binding);
    return GetGlobalIndexRangeFromIndex(index);
}

// For given binding, return ptr to ImmutableSampler array
VkSampler const *cvdescriptorset::DescriptorSetLayoutDef::GetImmutableSamplerPtrFromBinding(const uint32_t binding) const {
    const auto &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.end()) {
        return bindings_[bi_itr->second].pImmutableSamplers;
    }
    return nullptr;
}
// Move to next valid binding having a non-zero binding count
uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetNextValidBinding(const uint32_t binding) const {
    auto it = non_empty_bindings_.upper_bound(binding);
    assert(it != non_empty_bindings_.cend());
    if (it != non_empty_bindings_.cend()) return *it;
    return GetMaxBinding() + 1;
}
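// Note: GetNextValidBinding() asserts (in debug builds) that a later non-empty binding exists; when
// called with the last non-empty binding, the GetMaxBinding() + 1 return value acts as an "end"
// sentinel for callers that walk bindings in order.
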
// For given index, return ptr to ImmutableSampler array
VkSampler const *cvdescriptorset::DescriptorSetLayoutDef::GetImmutableSamplerPtrFromIndex(const uint32_t index) const {
    if (index < bindings_.size()) {
        return bindings_[index].pImmutableSamplers;
    }
    return nullptr;
}

// If our layout is compatible with rh_ds_layout, return true.
bool cvdescriptorset::DescriptorSetLayout::IsCompatible(DescriptorSetLayout const *rh_ds_layout) const {
    bool compatible = (this == rh_ds_layout) || (GetLayoutDef() == rh_ds_layout->GetLayoutDef());
    return compatible;
}
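// Because layout definitions are canonicalized through GetCanonicalId(), the GetLayoutDef() comparison
// above is a cheap pointer/identity check rather than a deep compare of every binding.
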
// If our layout is compatible with rh_ds_layout, return true,
// else return false and fill in error_msg with a description of what causes the incompatibility
bool cvdescriptorset::VerifySetLayoutCompatibility(DescriptorSetLayout const *lh_ds_layout, DescriptorSetLayout const *rh_ds_layout,
                                                   std::string *error_msg) {
    // Short circuit the detailed check.
    if (lh_ds_layout->IsCompatible(rh_ds_layout)) return true;

    // Do a detailed compatibility check of this lhs def (referenced by lh_ds_layout), vs. the rhs (layout and def)
    // Should only be run if trivial accept has failed, and in that context should return false.
    VkDescriptorSetLayout lh_dsl_handle = lh_ds_layout->GetDescriptorSetLayout();
    VkDescriptorSetLayout rh_dsl_handle = rh_ds_layout->GetDescriptorSetLayout();
    DescriptorSetLayoutDef const *lh_ds_layout_def = lh_ds_layout->GetLayoutDef();
    DescriptorSetLayoutDef const *rh_ds_layout_def = rh_ds_layout->GetLayoutDef();

    // Check descriptor counts
    if (lh_ds_layout_def->GetTotalDescriptorCount() != rh_ds_layout_def->GetTotalDescriptorCount()) {
        std::stringstream error_str;
        error_str << "DescriptorSetLayout " << lh_dsl_handle << " has " << lh_ds_layout_def->GetTotalDescriptorCount()
                  << " descriptors, but DescriptorSetLayout " << rh_dsl_handle << ", which comes from pipelineLayout, has "
                  << rh_ds_layout_def->GetTotalDescriptorCount() << " descriptors.";
        *error_msg = error_str.str();
        return false;  // trivial fail case
    }

    // Descriptor counts match so need to go through bindings one-by-one
    // and verify that type and stageFlags match
    for (const auto &binding : lh_ds_layout_def->GetBindings()) {
        // TODO : Do we also need to check immutable samplers?
        // VkDescriptorSetLayoutBinding *rh_binding;
        if (binding.descriptorCount != rh_ds_layout_def->GetDescriptorCountFromBinding(binding.binding)) {
            std::stringstream error_str;
            error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << lh_dsl_handle
                      << " has a descriptorCount of " << binding.descriptorCount << " but binding " << binding.binding
                      << " for DescriptorSetLayout " << rh_dsl_handle
                      << ", which comes from pipelineLayout, has a descriptorCount of "
                      << rh_ds_layout_def->GetDescriptorCountFromBinding(binding.binding);
            *error_msg = error_str.str();
            return false;
        } else if (binding.descriptorType != rh_ds_layout_def->GetTypeFromBinding(binding.binding)) {
            std::stringstream error_str;
            error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << lh_dsl_handle << " is type '"
                      << string_VkDescriptorType(binding.descriptorType) << "' but binding " << binding.binding
                      << " for DescriptorSetLayout " << rh_dsl_handle << ", which comes from pipelineLayout, is type '"
                      << string_VkDescriptorType(rh_ds_layout_def->GetTypeFromBinding(binding.binding)) << "'";
            *error_msg = error_str.str();
            return false;
        } else if (binding.stageFlags != rh_ds_layout_def->GetStageFlagsFromBinding(binding.binding)) {
            std::stringstream error_str;
            error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << lh_dsl_handle << " has stageFlags "
                      << binding.stageFlags << " but binding " << binding.binding << " for DescriptorSetLayout " << rh_dsl_handle
                      << ", which comes from pipelineLayout, has stageFlags "
                      << rh_ds_layout_def->GetStageFlagsFromBinding(binding.binding);
            *error_msg = error_str.str();
            return false;
        }
    }
    // No detailed check should succeed if the trivial check failed -- or the dictionary has failed somehow.
    bool compatible = true;
    assert(!compatible);
    return compatible;
}

bool cvdescriptorset::DescriptorSetLayoutDef::IsNextBindingConsistent(const uint32_t binding) const {
    if (!binding_to_index_map_.count(binding + 1)) return false;
    auto const &bi_itr = binding_to_index_map_.find(binding);
    if (bi_itr != binding_to_index_map_.end()) {
        const auto &next_bi_itr = binding_to_index_map_.find(binding + 1);
        if (next_bi_itr != binding_to_index_map_.end()) {
            auto type = bindings_[bi_itr->second].descriptorType;
            auto stage_flags = bindings_[bi_itr->second].stageFlags;
            auto immut_samp = bindings_[bi_itr->second].pImmutableSamplers ? true : false;
            auto flags = binding_flags_[bi_itr->second];
            if ((type != bindings_[next_bi_itr->second].descriptorType) ||
                (stage_flags != bindings_[next_bi_itr->second].stageFlags) ||
                (immut_samp != (bindings_[next_bi_itr->second].pImmutableSamplers ? true : false)) ||
                (flags != binding_flags_[next_bi_itr->second])) {
                return false;
            }
            return true;
        }
    }
    return false;
}
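// IsNextBindingConsistent() backs update validation when a write or copy spills past the end of one
// binding: descriptors may roll over into binding N+1 only if that binding is "consistent" with the
// current one (same type, stageFlags, immutable-sampler usage, and binding flags, as checked above).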

// The DescriptorSetLayout stores the per-handle data for a descriptor set layout, and references the common definition for the
// handle-invariant portion
cvdescriptorset::DescriptorSetLayout::DescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo *p_create_info,
                                                          const VkDescriptorSetLayout layout)
    : layout_(layout), layout_destroyed_(false), layout_id_(GetCanonicalId(p_create_info)) {}

// Validate descriptor set layout create info
bool cvdescriptorset::ValidateDescriptorSetLayoutCreateInfo(
    const debug_report_data *report_data, const VkDescriptorSetLayoutCreateInfo *create_info, const bool push_descriptor_ext,
    const uint32_t max_push_descriptors, const bool descriptor_indexing_ext,
    const VkPhysicalDeviceDescriptorIndexingFeaturesEXT *descriptor_indexing_features,
    const VkPhysicalDeviceInlineUniformBlockFeaturesEXT *inline_uniform_block_features,
    const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *inline_uniform_block_props, const DeviceExtensions *device_extensions) {
    bool skip = false;
    std::unordered_set<uint32_t> bindings;
    uint64_t total_descriptors = 0;

    const auto *flags_create_info = lvl_find_in_chain<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>(create_info->pNext);

    const bool push_descriptor_set = !!(create_info->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
    if (push_descriptor_set && !push_descriptor_ext) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        kVUID_Core_DrawState_ExtensionNotEnabled,
                        "Attempted to use %s in %s but its required extension %s has not been enabled.\n",
                        "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR", "VkDescriptorSetLayoutCreateInfo::flags",
                        VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    }

    const bool update_after_bind_set = !!(create_info->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT);
    if (update_after_bind_set && !descriptor_indexing_ext) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        kVUID_Core_DrawState_ExtensionNotEnabled,
                        "Attempted to use %s in %s but its required extension %s has not been enabled.\n",
                        "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT", "VkDescriptorSetLayoutCreateInfo::flags",
                        VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
    }

    auto valid_type = [push_descriptor_set](const VkDescriptorType type) {
        return !push_descriptor_set ||
               ((type != VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) && (type != VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) &&
                (type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT));
    };

    uint32_t max_binding = 0;

    for (uint32_t i = 0; i < create_info->bindingCount; ++i) {
        const auto &binding_info = create_info->pBindings[i];
        max_binding = std::max(max_binding, binding_info.binding);

        if (!bindings.insert(binding_info.binding).second) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkDescriptorSetLayoutCreateInfo-binding-00279",
                            "duplicated binding number in VkDescriptorSetLayoutBinding.");
        }
        if (!valid_type(binding_info.descriptorType)) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
                                ? "VUID-VkDescriptorSetLayoutCreateInfo-flags-02208"
                                : "VUID-VkDescriptorSetLayoutCreateInfo-flags-00280",
                            "invalid type %s, for push descriptors in VkDescriptorSetLayoutBinding entry %" PRIu32 ".",
                            string_VkDescriptorType(binding_info.descriptorType), i);
        }

        if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
            if (!device_extensions->vk_ext_inline_uniform_block) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
                                "UNASSIGNED-Extension not enabled",
                                "Creating VkDescriptorSetLayout with descriptor type VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT "
                                "but extension %s is missing",
                                VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME);
            } else {
                if ((binding_info.descriptorCount % 4) != 0) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    "VUID-VkDescriptorSetLayoutBinding-descriptorType-02209",
                                    "descriptorCount =(%" PRIu32 ") must be a multiple of 4", binding_info.descriptorCount);
                }
                if (binding_info.descriptorCount > inline_uniform_block_props->maxInlineUniformBlockSize) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    "VUID-VkDescriptorSetLayoutBinding-descriptorType-02210",
                                    "descriptorCount =(%" PRIu32 ") must be less than or equal to maxInlineUniformBlockSize",
                                    binding_info.descriptorCount);
                }
            }
        }

        total_descriptors += binding_info.descriptorCount;
    }

    if (flags_create_info) {
        if (flags_create_info->bindingCount != 0 && flags_create_info->bindingCount != create_info->bindingCount) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-bindingCount-03002",
                            "VkDescriptorSetLayoutCreateInfo::bindingCount (%d) != "
                            "VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::bindingCount (%d)",
                            create_info->bindingCount, flags_create_info->bindingCount);
        }

        if (flags_create_info->bindingCount == create_info->bindingCount) {
            for (uint32_t i = 0; i < create_info->bindingCount; ++i) {
                const auto &binding_info = create_info->pBindings[i];

                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT) {
                    if (!update_after_bind_set) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutCreateInfo-flags-03000",
                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }

                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER &&
                        !descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
                                        "descriptorBindingUniformBufferUpdateAfterBind-03005",
                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }
                    if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER ||
                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) &&
                        !descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
                                        "descriptorBindingSampledImageUpdateAfterBind-03006",
                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }
                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE &&
                        !descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
                                        "descriptorBindingStorageImageUpdateAfterBind-03007",
                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }
                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER &&
                        !descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
                                        "descriptorBindingStorageBufferUpdateAfterBind-03008",
                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }
                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER &&
                        !descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
                                        "descriptorBindingUniformTexelBufferUpdateAfterBind-03009",
                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }
                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER &&
                        !descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
                                        "descriptorBindingStorageTexelBufferUpdateAfterBind-03010",
                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }
                    if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT ||
                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-None-03011",
                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }

                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT &&
                        !inline_uniform_block_features->descriptorBindingInlineUniformBlockUpdateAfterBind) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
                                        "descriptorBindingInlineUniformBlockUpdateAfterBind-02211",
                                        "Invalid flags (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT) for "
                                        "VkDescriptorSetLayoutBinding entry %" PRIu32
                                        " with descriptorBindingInlineUniformBlockUpdateAfterBind not enabled",
                                        i);
                    }
                }

                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT) {
                    if (!descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending) {
                        skip |= log_msg(
                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUpdateUnusedWhilePending-03012",
                            "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }
                }

                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT) {
                    if (!descriptor_indexing_features->descriptorBindingPartiallyBound) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingPartiallyBound-03013",
                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }
                }

                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT) {
                    if (binding_info.binding != max_binding) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03004",
                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }

                    if (!descriptor_indexing_features->descriptorBindingVariableDescriptorCount) {
                        skip |= log_msg(
                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingVariableDescriptorCount-03014",
                            "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }
                    if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03015",
                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                    }
                }

                if (push_descriptor_set &&
                    (flags_create_info->pBindingFlags[i] &
                     (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT |
                      VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT))) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-flags-03003",
                                    "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
                }
            }
        }
    }

    if ((push_descriptor_set) && (total_descriptors > max_push_descriptors)) {
        const char *undefined = push_descriptor_ext ? "" : " -- undefined";
        skip |=
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    "VUID-VkDescriptorSetLayoutCreateInfo-flags-00281",
                    "for push descriptor, total descriptor count in layout (%" PRIu64
                    ") must not be greater than VkPhysicalDevicePushDescriptorPropertiesKHR::maxPushDescriptors (%" PRIu32 "%s).",
                    total_descriptors, max_push_descriptors, undefined);
    }

    return skip;
}

cvdescriptorset::AllocateDescriptorSetsData::AllocateDescriptorSetsData(uint32_t count)
    : required_descriptors_by_type{}, layout_nodes(count, nullptr) {}

cvdescriptorset::DescriptorSet::DescriptorSet(const VkDescriptorSet set, const VkDescriptorPool pool,
                                              const std::shared_ptr<DescriptorSetLayout const> &layout, uint32_t variable_count,
                                              cvdescriptorset::DescriptorSet::StateTracker *state_data)
    : some_update_(false),
      set_(set),
      pool_state_(nullptr),
      p_layout_(layout),
      state_data_(state_data),
      variable_count_(variable_count),
      change_count_(0) {
    pool_state_ = state_data->GetDescriptorPoolState(pool);
    // Foreach binding, create default descriptors of given type
    descriptors_.reserve(p_layout_->GetTotalDescriptorCount());
    for (uint32_t i = 0; i < p_layout_->GetBindingCount(); ++i) {
        auto type = p_layout_->GetTypeFromIndex(i);
        switch (type) {
            case VK_DESCRIPTOR_TYPE_SAMPLER: {
                auto immut_sampler = p_layout_->GetImmutableSamplerPtrFromIndex(i);
                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
                    if (immut_sampler) {
                        descriptors_.emplace_back(new SamplerDescriptor(immut_sampler + di));
                        some_update_ = true;  // Immutable samplers are updated at creation
                    } else
                        descriptors_.emplace_back(new SamplerDescriptor(nullptr));
                }
                break;
            }
            case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
                auto immut = p_layout_->GetImmutableSamplerPtrFromIndex(i);
                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
                    if (immut) {
                        descriptors_.emplace_back(new ImageSamplerDescriptor(immut + di));
                        some_update_ = true;  // Immutable samplers are updated at creation
                    } else
                        descriptors_.emplace_back(new ImageSamplerDescriptor(nullptr));
                }
                break;
            }
            // ImageDescriptors
            case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
                    descriptors_.emplace_back(new ImageDescriptor(type));
                break;
            case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
                    descriptors_.emplace_back(new TexelDescriptor(type));
                break;
            case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
            case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
                    descriptors_.emplace_back(new BufferDescriptor(type));
                break;
            case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
                    descriptors_.emplace_back(new InlineUniformDescriptor(type));
                break;
            case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:
                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
                    descriptors_.emplace_back(new AccelerationStructureDescriptor(type));
                break;
            default:
                assert(0);  // Bad descriptor type specified
                break;
        }
    }
}
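// The constructor above eagerly fills descriptors_ with default (not-yet-updated) descriptor objects,
// one per descriptor in layout order, so subsequent writes, copies, and draw-time validation can address
// any descriptor directly by the global index returned from GetGlobalIndexRangeFromBinding().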

cvdescriptorset::DescriptorSet::~DescriptorSet() { InvalidateBoundCmdBuffers(); }

static std::string StringDescriptorReqViewType(descriptor_req req) {
    std::string result("");
    for (unsigned i = 0; i <= VK_IMAGE_VIEW_TYPE_END_RANGE; i++) {
        if (req & (1 << i)) {
            if (result.size()) result += ", ";
            result += string_VkImageViewType(VkImageViewType(i));
        }
    }

    if (!result.size()) result = "(none)";

    return result;
}

static char const *StringDescriptorReqComponentType(descriptor_req req) {
    if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_SINT) return "SINT";
    if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_UINT) return "UINT";
    if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT) return "FLOAT";
    return "(none)";
}

unsigned DescriptorRequirementsBitsFromFormat(VkFormat fmt) {
    if (FormatIsSInt(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_SINT;
    if (FormatIsUInt(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_UINT;
    if (FormatIsDepthAndStencil(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT | DESCRIPTOR_REQ_COMPONENT_TYPE_UINT;
    if (fmt == VK_FORMAT_UNDEFINED) return 0;
    // everything else -- UNORM/SNORM/FLOAT/USCALED/SSCALED is all float in the shader.
    return DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT;
}
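// These requirement bits are intended to be compared at draw time against the component type a shader
// declares for the corresponding image/texel resource (the DESCRIPTOR_REQ_COMPONENT_TYPE_* checks), so
// that, for example, sampling an SINT image through a float-typed image variable can be reported.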

// Validate that the state of this set is appropriate for the given bindings and dynamic_offsets at Draw time
// This includes validating that all descriptors in the given bindings are updated,
// that any update buffers are valid, and that any dynamic offsets are within the bounds of their buffers.
// Return true if state is acceptable, or false and write an error message into error string
bool CoreChecks::ValidateDrawState(const DescriptorSet *descriptor_set, const std::map<uint32_t, descriptor_req> &bindings,
                                   const std::vector<uint32_t> &dynamic_offsets, const CMD_BUFFER_STATE *cb_node,
                                   const char *caller, std::string *error) const {
    using DescriptorClass = cvdescriptorset::DescriptorClass;
    using BufferDescriptor = cvdescriptorset::BufferDescriptor;
    using ImageDescriptor = cvdescriptorset::ImageDescriptor;
    using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
    using SamplerDescriptor = cvdescriptorset::SamplerDescriptor;
    using TexelDescriptor = cvdescriptorset::TexelDescriptor;
    for (auto binding_pair : bindings) {
        auto binding = binding_pair.first;
        DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(), binding);
        if (binding_it.AtEnd()) {  // End at construction is the condition for an invalid binding.
            std::stringstream error_str;
            error_str << "Attempting to validate DrawState for binding #" << binding
                      << " which is an invalid binding for this descriptor set.";
            *error = error_str.str();
            return false;
        }

        if (binding_it.GetDescriptorBindingFlags() &
            (VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT)) {
            // Can't validate the descriptor because it may not have been updated,
            // or the view could have been destroyed
            continue;
        }

        // Copy the range, the end range is subject to update based on variable length descriptor arrays.
        cvdescriptorset::IndexRange index_range = binding_it.GetGlobalIndexRange();
        auto array_idx = 0;  // Track array idx if we're dealing with array descriptors

        if (binding_it.IsVariableDescriptorCount()) {
            // Only validate the first N descriptors if it uses variable_count
            index_range.end = index_range.start + descriptor_set->GetVariableDescriptorCount();
        }

        for (uint32_t i = index_range.start; i < index_range.end; ++i, ++array_idx) {
            uint32_t index = i - index_range.start;
            const auto *descriptor = descriptor_set->GetDescriptorFromGlobalIndex(i);

            if (descriptor->GetClass() == DescriptorClass::InlineUniform) {
                // Can't validate the descriptor because it may not have been updated.
                continue;
            } else if (!descriptor->updated) {
                std::stringstream error_str;
                error_str << "Descriptor in binding #" << binding << " index " << index
                          << " is being used in draw but has never been updated via vkUpdateDescriptorSets() or a similar call.";
                *error = error_str.str();
                return false;
            } else {
                auto descriptor_class = descriptor->GetClass();
                if (descriptor_class == DescriptorClass::GeneralBuffer) {
                    // Verify that buffers are valid
                    auto buffer = static_cast<const BufferDescriptor *>(descriptor)->GetBuffer();
                    auto buffer_node = GetBufferState(buffer);
                    if (!buffer_node) {
                        std::stringstream error_str;
                        error_str << "Descriptor in binding #" << binding << " index " << index << " references invalid buffer "
                                  << buffer << ".";
                        *error = error_str.str();
                        return false;
                    } else if (!buffer_node->sparse) {
                        for (auto mem_binding : buffer_node->GetBoundMemory()) {
                            if (!GetDevMemState(mem_binding)) {
                                std::stringstream error_str;
                                error_str << "Descriptor in binding #" << binding << " index " << index << " uses buffer " << buffer
| 721 | << " that references invalid memory " << mem_binding << "."; |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 722 | *error = error_str.str(); |
Tobin Ehlis | c826645 | 2017-04-07 12:20:30 -0600 | [diff] [blame] | 723 | return false; |
| 724 | } |
| 725 | } |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 726 | } |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 727 | if (descriptor->IsDynamic()) { |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 728 | // Validate that dynamic offsets are within the buffer |
| 729 | auto buffer_size = buffer_node->createInfo.size; |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 730 | auto range = static_cast<const BufferDescriptor *>(descriptor)->GetRange(); |
| 731 | auto desc_offset = static_cast<const BufferDescriptor *>(descriptor)->GetOffset(); |
| 732 | auto dyn_offset = dynamic_offsets[binding_it.GetDynamicOffsetIndex() + array_idx]; |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 733 | if (VK_WHOLE_SIZE == range) { |
| 734 | if ((dyn_offset + desc_offset) > buffer_size) { |
| 735 | std::stringstream error_str; |
Locke | b994adf | 2019-03-29 23:52:31 -0600 | [diff] [blame] | 736 | error_str << "Dynamic descriptor in binding #" << binding << " index " << index << " uses buffer "
| 737 | << buffer << " with update range of VK_WHOLE_SIZE and has dynamic offset " << dyn_offset
| 738 | << " combined with offset " << desc_offset << ", which oversteps the buffer size of "
| 739 | << buffer_size << ".";
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 740 | *error = error_str.str(); |
| 741 | return false; |
| 742 | } |
| 743 | } else { |
| 744 | if ((dyn_offset + desc_offset + range) > buffer_size) { |
| 745 | std::stringstream error_str; |
Locke | b994adf | 2019-03-29 23:52:31 -0600 | [diff] [blame] | 746 | error_str << "Dynamic descriptor in binding #" << binding << " index " << index << " uses buffer " |
| 747 | << buffer << " with dynamic offset " << dyn_offset << " combined with offset " |
| 748 | << desc_offset << " and range " << range << " that oversteps the buffer size of " |
| 749 | << buffer_size << "."; |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 750 | *error = error_str.str(); |
| 751 | return false; |
| 752 | } |
| 753 | } |
| 754 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 755 | } else if (descriptor_class == DescriptorClass::ImageSampler || descriptor_class == DescriptorClass::Image) { |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 756 | VkImageView image_view; |
| 757 | VkImageLayout image_layout; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 758 | if (descriptor_class == DescriptorClass::ImageSampler) { |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 759 | image_view = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageView(); |
| 760 | image_layout = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageLayout(); |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 761 | } else { |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 762 | image_view = static_cast<const ImageDescriptor *>(descriptor)->GetImageView(); |
| 763 | image_layout = static_cast<const ImageDescriptor *>(descriptor)->GetImageLayout(); |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 764 | } |
| 765 | auto reqs = binding_pair.second; |
| 766 | |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 767 | auto image_view_state = GetImageViewState(image_view); |
Tobin Ehlis | 836a137 | 2017-07-14 11:25:21 -0600 | [diff] [blame] | 768 | if (nullptr == image_view_state) { |
| 769 | // Image view must have been destroyed since initial update. Could potentially flag the descriptor |
| 770 | // as "invalid" (updated = false) at DestroyImageView() time and detect this error at bind time |
| 771 | std::stringstream error_str; |
Locke | b994adf | 2019-03-29 23:52:31 -0600 | [diff] [blame] | 772 | error_str << "Descriptor in binding #" << binding << " index " << index << " is using imageView " |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 773 | << report_data->FormatHandle(image_view).c_str() << " that has been destroyed."; |
Tobin Ehlis | 836a137 | 2017-07-14 11:25:21 -0600 | [diff] [blame] | 774 | *error = error_str.str(); |
| 775 | return false; |
| 776 | } |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 777 | const auto &image_view_ci = image_view_state->create_info; |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 778 | |
unknown | 9b8a685 | 2019-08-13 17:53:15 -0600 | [diff] [blame] | 779 | if (reqs & DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS) { |
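| | // Note (editorial): reqs encodes the set of acceptable view types as one bit per VkImageViewType
| | // enum value (the DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS group), so the check below rejects the view
| | // when the bit for image_view_ci.viewType is not present in reqs.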
| 780 | if (~reqs & (1 << image_view_ci.viewType)) { |
| 781 | // bad view type |
| 782 | std::stringstream error_str; |
| 783 | error_str << "Descriptor in binding #" << binding << " index " << index |
| 784 | << " requires an image view of type " << StringDescriptorReqViewType(reqs) << " but got " |
| 785 | << string_VkImageViewType(image_view_ci.viewType) << "."; |
| 786 | *error = error_str.str(); |
| 787 | return false; |
| 788 | } |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 789 | |
unknown | 9b8a685 | 2019-08-13 17:53:15 -0600 | [diff] [blame] | 790 | if (!(reqs & image_view_state->descriptor_format_bits)) { |
| 791 | // bad component type |
| 792 | std::stringstream error_str; |
| 793 | error_str << "Descriptor in binding #" << binding << " index " << index << " requires " |
| 794 | << StringDescriptorReqComponentType(reqs) |
| 795 | << " component type, but bound descriptor format is " << string_VkFormat(image_view_ci.format) |
| 796 | << "."; |
| 797 | *error = error_str.str(); |
| 798 | return false; |
| 799 | } |
Chris Forbes | da01e8d | 2018-08-27 15:36:57 -0700 | [diff] [blame] | 800 | } |
| 801 | |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 802 | if (!disabled.image_layout_validation) { |
| 803 | auto image_node = GetImageState(image_view_ci.image); |
| 804 | assert(image_node); |
| 805 | // Verify Image Layout |
| 806 | // No "invalid layout" VUID required for this call, since the optimal_layout parameter is UNDEFINED. |
| 807 | bool hit_error = false; |
| 808 | VerifyImageLayout(cb_node, image_node, image_view_state->normalized_subresource_range, |
| 809 | image_view_ci.subresourceRange.aspectMask, image_layout, VK_IMAGE_LAYOUT_UNDEFINED, |
| 810 | caller, kVUIDUndefined, "VUID-VkDescriptorImageInfo-imageLayout-00344", &hit_error); |
| 811 | if (hit_error) { |
| 812 | *error = |
| 813 | "Image layout specified at vkUpdateDescriptorSet* or vkCmdPushDescriptorSet* time " |
| 814 | "doesn't match actual image layout at time descriptor is used. See previous error callback for " |
| 815 | "specific details."; |
| 816 | return false; |
| 817 | } |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 818 | } |
John Zulauf | f660ad6 | 2019-03-23 07:16:05 -0600 | [diff] [blame] | 819 | |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 820 | // Verify Sample counts |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 821 | if ((reqs & DESCRIPTOR_REQ_SINGLE_SAMPLE) && image_view_state->samples != VK_SAMPLE_COUNT_1_BIT) { |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 822 | std::stringstream error_str; |
Locke | b994adf | 2019-03-29 23:52:31 -0600 | [diff] [blame] | 823 | error_str << "Descriptor in binding #" << binding << " index " << index |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 824 | << " requires bound image to have VK_SAMPLE_COUNT_1_BIT but got " |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 825 | << string_VkSampleCountFlagBits(image_view_state->samples) << "."; |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 826 | *error = error_str.str(); |
| 827 | return false; |
| 828 | } |
Jeff Bolz | 6cede83 | 2019-08-09 23:30:39 -0500 | [diff] [blame] | 829 | if ((reqs & DESCRIPTOR_REQ_MULTI_SAMPLE) && image_view_state->samples == VK_SAMPLE_COUNT_1_BIT) { |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 830 | std::stringstream error_str; |
Locke | b994adf | 2019-03-29 23:52:31 -0600 | [diff] [blame] | 831 | error_str << "Descriptor in binding #" << binding << " index " << index |
Chris Forbes | 1f7f3ca | 2017-05-08 13:54:50 -0700 | [diff] [blame] | 832 | << " requires bound image to have multiple samples, but got VK_SAMPLE_COUNT_1_BIT."; |
| 833 | *error = error_str.str(); |
| 834 | return false; |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 835 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 836 | } else if (descriptor_class == DescriptorClass::TexelBuffer) { |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 837 | auto texel_buffer = static_cast<const TexelDescriptor *>(descriptor); |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 838 | auto buffer_view = GetBufferViewState(texel_buffer->GetBufferView()); |
Chris Forbes | e92dd1d | 2019-01-21 15:58:57 -0800 | [diff] [blame] | 839 | |
Locke | 2d9c3dd | 2019-04-08 16:29:09 -0600 | [diff] [blame] | 840 | if (nullptr == buffer_view) { |
| 841 | std::stringstream error_str; |
| 842 | error_str << "Descriptor in binding #" << binding << " index " << index << " is using bufferView " |
| 843 | << texel_buffer->GetBufferView() << " that has been destroyed.";
| 844 | *error = error_str.str(); |
| 845 | return false; |
| 846 | } |
| 847 | auto buffer = buffer_view->create_info.buffer; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 848 | auto buffer_state = GetBufferState(buffer); |
Locke | 2d9c3dd | 2019-04-08 16:29:09 -0600 | [diff] [blame] | 849 | if (!buffer_state) { |
| 850 | std::stringstream error_str; |
| 851 | error_str << "Descriptor in binding #" << binding << " index " << index << " is using buffer " |
| 852 | << buffer << " that has been destroyed.";
| 853 | *error = error_str.str(); |
| 854 | return false; |
| 855 | } |
Chris Forbes | e92dd1d | 2019-01-21 15:58:57 -0800 | [diff] [blame] | 856 | auto reqs = binding_pair.second; |
| 857 | auto format_bits = DescriptorRequirementsBitsFromFormat(buffer_view->create_info.format); |
| 858 | |
| 859 | if (!(reqs & format_bits)) { |
| 860 | // bad component type |
| 861 | std::stringstream error_str; |
Locke | b994adf | 2019-03-29 23:52:31 -0600 | [diff] [blame] | 862 | error_str << "Descriptor in binding #" << binding << " index " << index << " requires " |
Chris Forbes | e92dd1d | 2019-01-21 15:58:57 -0800 | [diff] [blame] | 863 | << StringDescriptorReqComponentType(reqs) << " component type, but bound descriptor format is " |
| 864 | << string_VkFormat(buffer_view->create_info.format) << "."; |
| 865 | *error = error_str.str(); |
| 866 | return false; |
| 867 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 868 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 869 | if (descriptor_class == DescriptorClass::ImageSampler || descriptor_class == DescriptorClass::PlainSampler) { |
Tobin Ehlis | b1a2e4b | 2018-03-16 07:54:24 -0600 | [diff] [blame] | 870 | // Verify Sampler still valid |
| 871 | VkSampler sampler; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 872 | if (descriptor_class == DescriptorClass::ImageSampler) { |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 873 | sampler = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetSampler(); |
Tobin Ehlis | b1a2e4b | 2018-03-16 07:54:24 -0600 | [diff] [blame] | 874 | } else { |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 875 | sampler = static_cast<const SamplerDescriptor *>(descriptor)->GetSampler(); |
Tobin Ehlis | b1a2e4b | 2018-03-16 07:54:24 -0600 | [diff] [blame] | 876 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 877 | if (!ValidateSampler(sampler)) { |
Tobin Ehlis | b1a2e4b | 2018-03-16 07:54:24 -0600 | [diff] [blame] | 878 | std::stringstream error_str; |
Locke | b994adf | 2019-03-29 23:52:31 -0600 | [diff] [blame] | 879 | error_str << "Descriptor in binding #" << binding << " index " << index << " is using sampler " << sampler |
| 880 | << " that has been destroyed."; |
Tobin Ehlis | b1a2e4b | 2018-03-16 07:54:24 -0600 | [diff] [blame] | 881 | *error = error_str.str(); |
| 882 | return false; |
Locke | a223c10 | 2019-04-05 00:38:24 -0600 | [diff] [blame] | 883 | } else { |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 884 | const SAMPLER_STATE *sampler_state = GetSamplerState(sampler); |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 885 | if (sampler_state->samplerConversion && !descriptor->IsImmutableSampler()) { |
Locke | a223c10 | 2019-04-05 00:38:24 -0600 | [diff] [blame] | 886 | std::stringstream error_str; |
John Zulauf | 382e191 | 2019-06-10 15:27:44 -0600 | [diff] [blame] | 887 | error_str << "sampler (" << sampler << ") in the descriptor set (" << descriptor_set->GetSet() |
Locke | a223c10 | 2019-04-05 00:38:24 -0600 | [diff] [blame] | 888 | << ") contains a YCBCR conversion (" << sampler_state->samplerConversion |
| 889 | << "), so the sampler MUST also exist as an immutable sampler.";
| 890 | *error = error_str.str(); |
| 891 | } |
Tobin Ehlis | b1a2e4b | 2018-03-16 07:54:24 -0600 | [diff] [blame] | 892 | } |
| 893 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 894 | } |
| 895 | } |
| 896 | } |
| 897 | return true; |
| 898 | } |
Chris Forbes | 5798913 | 2016-07-26 17:06:10 +1200 | [diff] [blame] | 899 | |
Tobin Ehlis | 9906d9d | 2016-05-17 14:23:46 -0600 | [diff] [blame] | 900 | // Set is being deleted or updated, so invalidate all bound cmd buffers
| 901 | void cvdescriptorset::DescriptorSet::InvalidateBoundCmdBuffers() { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 902 | state_data_->InvalidateCommandBuffers(cb_bindings, VulkanTypedHandle(set_, kVulkanObjectTypeDescriptorSet)); |
Tobin Ehlis | 9906d9d | 2016-05-17 14:23:46 -0600 | [diff] [blame] | 903 | } |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 904 | |
| 905 | // Loop through the write updates to do for a push descriptor set, ignoring dstSet |
| 906 | void cvdescriptorset::DescriptorSet::PerformPushDescriptorsUpdate(uint32_t write_count, const VkWriteDescriptorSet *p_wds) { |
| 907 | assert(IsPushDescriptor()); |
| 908 | for (uint32_t i = 0; i < write_count; i++) { |
| 909 | PerformWriteUpdate(&p_wds[i]); |
| 910 | } |
Jason Macnak | 83cfd58 | 2019-07-31 10:14:24 -0700 | [diff] [blame^] | 911 | |
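| | // Note (editorial): safe_VkWriteDescriptorSet deep-copies the descriptor info arrays and pNext
| | // chain, so the pushed write contents stay available to later validation after the caller's
| | // arrays go out of scope.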
| 912 | push_descriptor_set_writes.clear(); |
| 913 | push_descriptor_set_writes.reserve(static_cast<std::size_t>(write_count)); |
| 914 | for (uint32_t i = 0; i < write_count; i++) { |
| 915 | push_descriptor_set_writes.push_back(safe_VkWriteDescriptorSet(&p_wds[i])); |
| 916 | } |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 917 | } |
| 918 | |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 919 | // Perform write update in given update struct |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 920 | void cvdescriptorset::DescriptorSet::PerformWriteUpdate(const VkWriteDescriptorSet *update) { |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 921 | // Perform update on a per-binding basis as consecutive updates roll over to next binding |
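| | // Illustrative example (hypothetical layout): if binding 1 holds 2 descriptors and binding 2 holds 4,
| | // a write with dstBinding=1, dstArrayElement=1, descriptorCount=3 updates binding 1 [1], then rolls
| | // over to update binding 2 [0] and [1].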
| 922 | auto descriptors_remaining = update->descriptorCount; |
| 923 | auto binding_being_updated = update->dstBinding; |
| 924 | auto offset = update->dstArrayElement; |
Tobin Ehlis | e16805c | 2017-08-09 09:10:37 -0600 | [diff] [blame] | 925 | uint32_t update_index = 0; |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 926 | while (descriptors_remaining) { |
| 927 | uint32_t update_count = std::min(descriptors_remaining, GetDescriptorCountFromBinding(binding_being_updated) - offset);
John Zulauf | c483f44 | 2017-12-15 14:02:06 -0700 | [diff] [blame] | 928 | auto global_idx = p_layout_->GetGlobalIndexRangeFromBinding(binding_being_updated).start + offset; |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 929 | // Loop over the updates for a single binding at a time |
Tobin Ehlis | e16805c | 2017-08-09 09:10:37 -0600 | [diff] [blame] | 930 | for (uint32_t di = 0; di < update_count; ++di, ++update_index) { |
| 931 | descriptors_[global_idx + di]->WriteUpdate(update, update_index); |
Tobin Ehlis | f922ef8 | 2016-11-30 10:19:14 -0700 | [diff] [blame] | 932 | } |
| 933 | // Roll over to next binding in case of consecutive update |
| 934 | descriptors_remaining -= update_count; |
| 935 | offset = 0; |
| 936 | binding_being_updated++; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 937 | } |
Jeff Bolz | dd4cfa1 | 2019-08-11 20:57:51 -0500 | [diff] [blame] | 938 | if (update->descriptorCount) { |
| 939 | some_update_ = true; |
| 940 | change_count_++; |
| 941 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 942 | |
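| | // Note (editorial): bindings created with UPDATE_AFTER_BIND or UPDATE_UNUSED_WHILE_PENDING may
| | // legally be updated while command buffers using this set are recorded or pending, so only
| | // updates to ordinary bindings invalidate the bound command buffers below.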
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 943 | if (!(p_layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) & |
| 944 | (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) { |
| 945 | InvalidateBoundCmdBuffers(); |
| 946 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 947 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 948 | // Validate Copy update |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 949 | bool CoreChecks::ValidateCopyUpdate(const VkCopyDescriptorSet *update, const DescriptorSet *dst_set, const DescriptorSet *src_set, |
| 950 | const char *func_name, std::string *error_code, std::string *error_msg) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 951 | auto dst_layout = dst_set->GetLayout(); |
| 952 | auto src_layout = src_set->GetLayout(); |
| 953 | |
John Zulauf | 5dfd45c | 2018-01-17 11:06:34 -0700 | [diff] [blame] | 954 | // Verify dst layout still valid |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 955 | if (dst_layout->IsDestroyed()) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 956 | *error_code = "VUID-VkCopyDescriptorSet-dstSet-parameter"; |
John Zulauf | 5dfd45c | 2018-01-17 11:06:34 -0700 | [diff] [blame] | 957 | string_sprintf(error_msg, |
locke-lunarg | 9edc281 | 2019-06-17 23:18:52 -0600 | [diff] [blame] | 958 | "Cannot call %s to perform copy update on dstSet %s" |
| 959 | " whose %s has been destroyed.",
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 960 | func_name, report_data->FormatHandle(dst_set->GetSet()).c_str(), |
| 961 | report_data->FormatHandle(dst_layout->GetDescriptorSetLayout()).c_str()); |
John Zulauf | 5dfd45c | 2018-01-17 11:06:34 -0700 | [diff] [blame] | 962 | return false; |
| 963 | } |
| 964 | |
| 965 | // Verify src layout still valid |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 966 | if (src_layout->IsDestroyed()) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 967 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-parameter"; |
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 968 | string_sprintf(error_msg, |
Locke | ca0d979 | 2019-03-03 23:48:13 -0700 | [diff] [blame] | 969 | "Cannot call %s to perform copy update of dstSet %s" |
locke-lunarg | 9edc281 | 2019-06-17 23:18:52 -0600 | [diff] [blame] | 970 | " from srcSet %s" |
| 971 | " whose %s has been destroyed.",
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 972 | func_name, report_data->FormatHandle(dst_set->GetSet()).c_str(), |
| 973 | report_data->FormatHandle(src_set->GetSet()).c_str(), |
| 974 | report_data->FormatHandle(src_layout->GetDescriptorSetLayout()).c_str()); |
John Zulauf | 5dfd45c | 2018-01-17 11:06:34 -0700 | [diff] [blame] | 975 | return false; |
| 976 | } |
| 977 | |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 978 | if (!dst_layout->HasBinding(update->dstBinding)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 979 | *error_code = "VUID-VkCopyDescriptorSet-dstBinding-00347"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 980 | std::stringstream error_str; |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 981 | error_str << "DescriptorSet " << dst_set->GetSet() << " does not have copy update dest binding of " << update->dstBinding; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 982 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 983 | return false; |
| 984 | } |
| 985 | if (!src_set->HasBinding(update->srcBinding)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 986 | *error_code = "VUID-VkCopyDescriptorSet-srcBinding-00345"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 987 | std::stringstream error_str; |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 988 | error_str << "DescriptorSet " << dst_set->GetSet() << " does not have copy update src binding of " << update->srcBinding; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 989 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 990 | return false; |
| 991 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 992 | // Verify the destination set is idle, unless the binding allows update-after-bind or update-unused-while-pending
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 993 | if (dst_set->in_use.load() && |
| 994 | !(dst_layout->GetDescriptorBindingFlagsFromBinding(update->dstBinding) & |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 995 | (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) { |
| 996 | // TODO : Re-using Free Idle error code, need copy update idle error code |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 997 | *error_code = "VUID-vkFreeDescriptorSets-pDescriptorSets-00309"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 998 | std::stringstream error_str; |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 999 | error_str << "Cannot call " << func_name << " to perform copy update on descriptor set " << dst_set->GetSet() |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1000 | << " that is in use by a command buffer"; |
| 1001 | *error_msg = error_str.str(); |
| 1002 | return false; |
| 1003 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1004 | // src & dst set bindings are valid |
| 1005 | // Check bounds of src & dst |
John Zulauf | c483f44 | 2017-12-15 14:02:06 -0700 | [diff] [blame] | 1006 | auto src_start_idx = src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start + update->srcArrayElement; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1007 | if ((src_start_idx + update->descriptorCount) > src_set->GetTotalDescriptorCount()) { |
| 1008 | // SRC update out of bounds |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1009 | *error_code = "VUID-VkCopyDescriptorSet-srcArrayElement-00346"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1010 | std::stringstream error_str; |
| 1011 | error_str << "Attempting copy update from descriptorSet " << update->srcSet << " binding#" << update->srcBinding |
John Zulauf | c483f44 | 2017-12-15 14:02:06 -0700 | [diff] [blame] | 1012 | << " with offset index of " << src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1013 | << " plus update array offset of " << update->srcArrayElement << " and update of " << update->descriptorCount |
Tobin Ehlis | 1d81edd | 2016-11-21 09:50:49 -0700 | [diff] [blame] | 1014 | << " descriptors oversteps total number of descriptors in set: " << src_set->GetTotalDescriptorCount(); |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1015 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1016 | return false; |
| 1017 | } |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1018 | auto dst_start_idx = dst_layout->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement; |
| 1019 | if ((dst_start_idx + update->descriptorCount) > dst_layout->GetTotalDescriptorCount()) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1020 | // DST update out of bounds |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1021 | *error_code = "VUID-VkCopyDescriptorSet-dstArrayElement-00348"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1022 | std::stringstream error_str; |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1023 | error_str << "Attempting copy update to descriptorSet " << dst_set->GetSet() << " binding#" << update->dstBinding |
| 1024 | << " with offset index of " << dst_layout->GetGlobalIndexRangeFromBinding(update->dstBinding).start |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1025 | << " plus update array offset of " << update->dstArrayElement << " and update of " << update->descriptorCount |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1026 | << " descriptors oversteps total number of descriptors in set: " << dst_layout->GetTotalDescriptorCount(); |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1027 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1028 | return false; |
| 1029 | } |
| 1030 | // Check that types match |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 1031 | // TODO : Base default error case going from here is "VUID-VkAcquireNextImageInfoKHR-semaphore-parameter" which covers all
| 1032 | // consistency issues, need more fine-grained error codes |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1033 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-00349"; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1034 | auto src_type = src_set->GetTypeFromBinding(update->srcBinding); |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1035 | auto dst_type = dst_layout->GetTypeFromBinding(update->dstBinding); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1036 | if (src_type != dst_type) { |
| 1037 | std::stringstream error_str; |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1038 | error_str << "Attempting copy update to descriptorSet " << dst_set->GetSet() << " binding #" << update->dstBinding |
| 1039 | << " with type " << string_VkDescriptorType(dst_type) << " from descriptorSet " << src_set->GetSet() |
| 1040 | << " binding #" << update->srcBinding << " with type " << string_VkDescriptorType(src_type) |
| 1041 | << ". Types do not match"; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1042 | *error_msg = error_str.str(); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1043 | return false; |
| 1044 | } |
| 1045 | // Verify consistency of src & dst bindings if update crosses binding boundaries |
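| | // Illustrative example (hypothetical layout): copying 4 descriptors starting at srcBinding 2,
| | // srcArrayElement 1 when binding 2 holds only 3 descriptors rolls over into binding 3, which must
| | // then be consistent with binding 2 (matching descriptor type, stage flags, and immutable-sampler usage).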
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1046 | if ((!VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator(src_layout.get(), update->srcBinding), |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 1047 | update->srcArrayElement, update->descriptorCount, "copy update from", src_set->GetSet(), |
| 1048 | error_msg)) || |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1049 | (!VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator(dst_layout.get(), update->dstBinding), |
| 1050 | update->dstArrayElement, update->descriptorCount, "copy update to", dst_set->GetSet(), |
| 1051 | error_msg))) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1052 | return false; |
| 1053 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1054 | |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1055 | if ((src_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT) && |
| 1056 | !(dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1057 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-01918"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1058 | std::stringstream error_str; |
| 1059 | error_str << "If pname:srcSet's (" << update->srcSet |
| 1060 | << ") layout was created with the " |
| 1061 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag " |
| 1062 | "set, then pname:dstSet's (" |
| 1063 | << update->dstSet |
| 1064 | << ") layout must: also have been created with the " |
| 1065 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set"; |
| 1066 | *error_msg = error_str.str(); |
| 1067 | return false; |
| 1068 | } |
| 1069 | |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1070 | if (!(src_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT) && |
| 1071 | (dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1072 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-01919"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1073 | std::stringstream error_str; |
| 1074 | error_str << "If pname:srcSet's (" << update->srcSet |
| 1075 | << ") layout was created without the " |
| 1076 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag " |
| 1077 | "set, then pname:dstSet's (" |
| 1078 | << update->dstSet |
| 1079 | << ") layout must: also have been created without the " |
| 1080 | "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set"; |
| 1081 | *error_msg = error_str.str(); |
| 1082 | return false; |
| 1083 | } |
| 1084 | |
| 1085 | if ((src_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT) && |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1086 | !(dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1087 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-01920"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1088 | std::stringstream error_str; |
| 1089 | error_str << "If the descriptor pool from which pname:srcSet (" << update->srcSet |
| 1090 | << ") was allocated was created " |
| 1091 | "with the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag " |
| 1092 | "set, then the descriptor pool from which pname:dstSet (" |
| 1093 | << update->dstSet |
| 1094 | << ") was allocated must: " |
| 1095 | "also have been created with the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set"; |
| 1096 | *error_msg = error_str.str(); |
| 1097 | return false; |
| 1098 | } |
| 1099 | |
| 1100 | if (!(src_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT) && |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1101 | (dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1102 | *error_code = "VUID-VkCopyDescriptorSet-srcSet-01921"; |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1103 | std::stringstream error_str; |
| 1104 | error_str << "If the descriptor pool from which pname:srcSet (" << update->srcSet |
| 1105 | << ") was allocated was created " |
| 1106 | "without the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag " |
| 1107 | "set, then the descriptor pool from which pname:dstSet (" |
| 1108 | << update->dstSet |
| 1109 | << ") was allocated must: " |
| 1110 | "also have been created without the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set"; |
| 1111 | *error_msg = error_str.str(); |
| 1112 | return false; |
| 1113 | } |
| 1114 | |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 1115 | if (src_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) { |
| 1116 | if ((update->srcArrayElement % 4) != 0) { |
| 1117 | *error_code = "VUID-VkCopyDescriptorSet-srcBinding-02223"; |
| 1118 | std::stringstream error_str; |
| 1119 | error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with " |
| 1120 | << "srcArrayElement " << update->srcArrayElement << " not a multiple of 4"; |
| 1121 | *error_msg = error_str.str(); |
| 1122 | return false; |
| 1123 | } |
| 1124 | if ((update->dstArrayElement % 4) != 0) { |
| 1125 | *error_code = "VUID-VkCopyDescriptorSet-dstBinding-02224"; |
| 1126 | std::stringstream error_str; |
| 1127 | error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with " |
| 1128 | << "dstArrayElement " << update->dstArrayElement << " not a multiple of 4"; |
| 1129 | *error_msg = error_str.str(); |
| 1130 | return false; |
| 1131 | } |
| 1132 | if ((update->descriptorCount % 4) != 0) { |
| 1133 | *error_code = "VUID-VkCopyDescriptorSet-srcBinding-02225"; |
| 1134 | std::stringstream error_str; |
| 1135 | error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with " |
| 1136 | << "descriptorCount " << update->descriptorCount << " not a multiple of 4"; |
| 1137 | *error_msg = error_str.str(); |
| 1138 | return false; |
| 1139 | } |
| 1140 | } |
| 1141 | |
Tobin Ehlis | d41e7b6 | 2016-05-19 07:56:18 -0600 | [diff] [blame] | 1142 | // Update parameters all look good, so verify the contents of the update
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1143 | if (!VerifyCopyUpdateContents(update, src_set, src_type, src_start_idx, func_name, error_code, error_msg)) return false; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1144 | |
| 1145 | // All checks passed so update is good |
| 1146 | return true; |
| 1147 | } |
| 1148 | // Perform Copy update |
| 1149 | void cvdescriptorset::DescriptorSet::PerformCopyUpdate(const VkCopyDescriptorSet *update, const DescriptorSet *src_set) { |
John Zulauf | c483f44 | 2017-12-15 14:02:06 -0700 | [diff] [blame] | 1150 | auto src_start_idx = src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start + update->srcArrayElement; |
| 1151 | auto dst_start_idx = p_layout_->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1152 | // Update parameters all look good so perform update |
| 1153 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 1154 | auto src = src_set->descriptors_[src_start_idx + di].get(); |
| 1155 | auto dst = descriptors_[dst_start_idx + di].get(); |
| 1156 | if (src->updated) { |
| 1157 | dst->CopyUpdate(src); |
| 1158 | some_update_ = true; |
Jeff Bolz | dd4cfa1 | 2019-08-11 20:57:51 -0500 | [diff] [blame] | 1159 | change_count_++; |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 1160 | } else { |
| 1161 | dst->updated = false; |
| 1162 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1163 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1164 | |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 1165 | if (!(p_layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) & |
| 1166 | (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) { |
| 1167 | InvalidateBoundCmdBuffers(); |
| 1168 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1169 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1170 | |
John Zulauf | 6f3d2bd | 2018-10-29 17:08:42 -0600 | [diff] [blame] | 1171 | // Update the drawing state for the affected descriptors. |
| 1172 | // Bind cb_node to this set and this set to cb_node.
| 1173 | // Add the bindings of the descriptor set.
| 1174 | // Set the layout based on the current descriptor layout (will mask subsequent layout mismatch errors)
| 1175 | // TODO: Modify the UpdateDrawState virtual functions to *only* set initial layout and not change layouts
Tobin Ehlis | f951910 | 2016-08-17 09:49:13 -0600 | [diff] [blame] | 1176 | // Prereq: This should be called for a set that has been confirmed to be active for the given cb_node, meaning it's going |
| 1177 | // to be used in a draw by the given cb_node |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1178 | void cvdescriptorset::DescriptorSet::UpdateDrawState(ValidationStateTracker *device_data, CMD_BUFFER_STATE *cb_node, |
John Zulauf | 6f3d2bd | 2018-10-29 17:08:42 -0600 | [diff] [blame] | 1179 | const std::map<uint32_t, descriptor_req> &binding_req_map) { |
Jeff Bolz | afa429a | 2019-08-14 09:59:22 -0500 | [diff] [blame] | 1180 | if (!device_data->disabled.command_buffer_state) { |
| 1181 | // bind cb to this descriptor set |
| 1182 | // Add bindings for descriptor set, the set's pool, and individual objects in the set |
| 1183 | auto inserted = cb_node->object_bindings.emplace(set_, kVulkanObjectTypeDescriptorSet); |
| 1184 | if (inserted.second) { |
| 1185 | cb_bindings.insert(cb_node); |
| 1186 | auto inserted2 = cb_node->object_bindings.emplace(pool_state_->pool, kVulkanObjectTypeDescriptorPool); |
| 1187 | if (inserted2.second) { |
| 1188 | pool_state_->cb_bindings.insert(cb_node); |
| 1189 | } |
| 1190 | } |
| 1191 | } |
Jeff Bolz | e18e724 | 2019-08-12 20:55:22 -0500 | [diff] [blame] | 1192 | |
| 1193 | // Descriptor UpdateDrawState functions do two things - associate resources to the command buffer, |
| 1194 | // and call image layout validation callbacks. If both are disabled, skip the entire loop. |
| 1195 | if (device_data->disabled.command_buffer_state && device_data->disabled.image_layout_validation) { |
| 1196 | return; |
| 1197 | } |
| 1198 | |
Tobin Ehlis | f951910 | 2016-08-17 09:49:13 -0600 | [diff] [blame] | 1199 | // For the active slots, use set# to look up descriptorSet from boundDescriptorSets, and bind all of that descriptor set's |
| 1200 | // resources |
Tobin Ehlis | 022528b | 2016-12-29 12:22:32 -0700 | [diff] [blame] | 1201 | for (auto binding_req_pair : binding_req_map) { |
| 1202 | auto binding = binding_req_pair.first; |
Tony-LunarG | 62c5dba | 2018-12-20 14:27:23 -0700 | [diff] [blame] | 1203 | // We aren't validating descriptors created with PARTIALLY_BOUND or UPDATE_AFTER_BIND, so don't record state |
| 1204 | if (p_layout_->GetDescriptorBindingFlagsFromBinding(binding) & |
| 1205 | (VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT)) { |
| 1206 | continue; |
| 1207 | } |
John Zulauf | c483f44 | 2017-12-15 14:02:06 -0700 | [diff] [blame] | 1208 | auto range = p_layout_->GetGlobalIndexRangeFromBinding(binding); |
| 1209 | for (uint32_t i = range.start; i < range.end; ++i) { |
Mark Lobodzinski | fae179e | 2019-03-08 16:47:08 -0700 | [diff] [blame] | 1210 | descriptors_[i]->UpdateDrawState(device_data, cb_node); |
Mark Lobodzinski | 2872f4a | 2018-09-03 17:00:53 -0600 | [diff] [blame] | 1211 | } |
| 1212 | } |
| 1213 | } |
| 1214 | |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1215 | void cvdescriptorset::DescriptorSet::FilterOneBindingReq(const BindingReqMap::value_type &binding_req_pair, BindingReqMap *out_req, |
| 1216 | const TrackedBindings &bindings, uint32_t limit) { |
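| | // 'bindings' holds the binding numbers already validated for this command buffer; 'limit' is the total
| | // number of bindings of this type in the layout. Once every binding of the type has been validated
| | // (size == limit) nothing is re-emitted; otherwise pass through any binding not yet seen.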
| 1217 | if (bindings.size() < limit) { |
| 1218 | const auto it = bindings.find(binding_req_pair.first); |
| 1219 | if (it == bindings.cend()) out_req->emplace(binding_req_pair); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 1220 | } |
| 1221 | } |
Mark Lobodzinski | 2872f4a | 2018-09-03 17:00:53 -0600 | [diff] [blame] | 1222 | |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1223 | void cvdescriptorset::DescriptorSet::FilterBindingReqs(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline, |
| 1224 | const BindingReqMap &in_req, BindingReqMap *out_req) const { |
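| | // Reduce in_req to the subset that still needs validation for this command buffer: buffer bindings are
| | // skipped once previously validated for this cb, while image/sampler bindings are re-emitted whenever
| | // the cb's image layouts have changed since they were last validated with this pipeline.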
| 1225 | // For const cleanliness we have to find in the maps... |
| 1226 | const auto validated_it = cached_validation_.find(&cb_state); |
| 1227 | if (validated_it == cached_validation_.cend()) { |
| 1228 | // We have nothing validated, copy in to out |
| 1229 | for (const auto &binding_req_pair : in_req) { |
| 1230 | out_req->emplace(binding_req_pair); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 1231 | } |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1232 | return; |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 1233 | } |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1234 | const auto &validated = validated_it->second; |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 1235 | |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1236 | const auto image_sample_version_it = validated.image_samplers.find(&pipeline); |
| 1237 | const VersionedBindings *image_sample_version = nullptr; |
| 1238 | if (image_sample_version_it != validated.image_samplers.cend()) { |
| 1239 | image_sample_version = &(image_sample_version_it->second); |
| 1240 | } |
| 1241 | const auto &dynamic_buffers = validated.dynamic_buffers; |
| 1242 | const auto &non_dynamic_buffers = validated.non_dynamic_buffers; |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 1243 | const auto &stats = p_layout_->GetBindingTypeStats(); |
| 1244 | for (const auto &binding_req_pair : in_req) { |
| 1245 | auto binding = binding_req_pair.first; |
| 1246 | VkDescriptorSetLayoutBinding const *layout_binding = p_layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding); |
| 1247 | if (!layout_binding) { |
| 1248 | continue; |
| 1249 | } |
| 1250 | // Caching criteria differs per type. |
| 1251 | // If image layouts have changed, the image descriptors need to be validated against them.
| 1252 | if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) || |
| 1253 | (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) { |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1254 | FilterOneBindingReq(binding_req_pair, out_req, dynamic_buffers, stats.dynamic_buffer_count); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 1255 | } else if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) || |
| 1256 | (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)) { |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1257 | FilterOneBindingReq(binding_req_pair, out_req, non_dynamic_buffers, stats.non_dynamic_buffer_count); |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 1258 | } else { |
| 1259 | // This is rather crude, as the changed layouts may not impact the bound descriptors, |
| 1260 | // but the simple "versioning" serves as a cheap "dirty" test.
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1261 | bool stale = true; |
| 1262 | if (image_sample_version) { |
| 1263 | const auto version_it = image_sample_version->find(binding); |
| 1264 | if (version_it != image_sample_version->cend() && (version_it->second == cb_state.image_layout_change_count)) { |
| 1265 | stale = false; |
| 1266 | } |
| 1267 | } |
| 1268 | if (stale) { |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 1269 | out_req->emplace(binding_req_pair); |
| 1270 | } |
| 1271 | } |
| 1272 | } |
| 1273 | } |
Tobin Ehlis | 9252c2b | 2016-07-21 14:40:22 -0600 | [diff] [blame] | 1274 | |
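| | // Counterpart to FilterBindingReqs: record the bindings just validated for this cb/pipeline so they can
| | // be skipped next time. Image/sampler bindings are stamped with the cb's current image_layout_change_count
| | // as a version so a later layout transition makes them stale again.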
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1275 | void cvdescriptorset::DescriptorSet::UpdateValidationCache(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline, |
| 1276 | const BindingReqMap &updated_bindings) { |
| 1277 | // Find (or create) the cached validation entry for this command buffer
| 1278 | auto &validated = cached_validation_[&cb_state]; |
| 1279 | |
| 1280 | auto &image_sample_version = validated.image_samplers[&pipeline]; |
| 1281 | auto &dynamic_buffers = validated.dynamic_buffers; |
| 1282 | auto &non_dynamic_buffers = validated.non_dynamic_buffers; |
| 1283 | for (const auto &binding_req_pair : updated_bindings) { |
| 1284 | auto binding = binding_req_pair.first; |
| 1285 | VkDescriptorSetLayoutBinding const *layout_binding = p_layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding); |
| 1286 | if (!layout_binding) { |
| 1287 | continue; |
| 1288 | } |
| 1289 | // Caching criteria differs per type. |
| 1290 | if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) || |
| 1291 | (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) { |
| 1292 | dynamic_buffers.emplace(binding); |
| 1293 | } else if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) || |
| 1294 | (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)) { |
| 1295 | non_dynamic_buffers.emplace(binding); |
| 1296 | } else { |
| 1297 | // Save the layout change version... |
| 1298 | image_sample_version[binding] = cb_state.image_layout_change_count; |
| 1299 | } |
| 1300 | } |
| 1301 | } |
| 1302 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1303 | cvdescriptorset::SamplerDescriptor::SamplerDescriptor(const VkSampler *immut) : sampler_(VK_NULL_HANDLE), immutable_(false) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1304 | updated = false; |
| 1305 | descriptor_class = PlainSampler; |
| 1306 | if (immut) { |
| 1307 | sampler_ = *immut; |
| 1308 | immutable_ = true; |
| 1309 | updated = true; |
| 1310 | } |
| 1311 | } |
Tobin Ehlis | e2f8029 | 2016-06-02 10:08:53 -0600 | [diff] [blame] | 1312 | // Validate given sampler. Currently this only checks to make sure it exists in the samplerMap |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1313 | bool CoreChecks::ValidateSampler(const VkSampler sampler) const { return (GetSamplerState(sampler) != nullptr); } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1314 | |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1315 | bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image_layout, VkDescriptorType type, |
John Zulauf | bd9b341 | 2019-08-22 17:16:11 -0600 | [diff] [blame] | 1316 | const char *func_name, std::string *error_code, std::string *error_msg) const { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1317 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00326"; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1318 | auto iv_state = GetImageViewState(image_view); |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 1319 | assert(iv_state); |
| 1320 | |
Tobin Ehlis | 8128096 | 2016-07-20 14:04:20 -0600 | [diff] [blame] | 1321 | // Note that when an imageview is created, we validated that memory is bound so no need to re-check here |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1322 | // Validate that imageLayout is compatible with aspect_mask and image format |
| 1323 | // and validate that image usage bits are correct for given usage |
Tobin Ehlis | 8b26a38 | 2016-09-14 08:02:49 -0600 | [diff] [blame] | 1324 | VkImageAspectFlags aspect_mask = iv_state->create_info.subresourceRange.aspectMask; |
| 1325 | VkImage image = iv_state->create_info.image; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1326 | VkFormat format = VK_FORMAT_MAX_ENUM; |
| 1327 | VkImageUsageFlags usage = 0; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1328 | auto image_node = GetImageState(image); |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 1329 | assert(image_node); |
Chris Forbes | 67757ff | 2017-07-21 13:59:01 -0700 | [diff] [blame] | 1330 | |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 1331 | format = image_node->createInfo.format; |
| 1332 | usage = image_node->createInfo.usage; |
| 1333 | // Validate that memory is bound to image |
| 1334 | // TODO: This should have its own valid usage id apart from 2524, which is from the CreateImageView case. The only
| 1335 | // way the error here occurs is if memory bound to a created imageView has been freed.
| 1336 | if (ValidateMemoryIsBoundToImage(image_node, func_name, "VUID-VkImageViewCreateInfo-image-01020")) { |
| 1337 | *error_code = "VUID-VkImageViewCreateInfo-image-01020"; |
| 1338 | *error_msg = "No memory bound to image."; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1339 | return false; |
| 1340 | } |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 1341 | |
| 1342 | // KHR_maintenance1 allows rendering into 2D or 2DArray views which slice a 3D image, |
| 1343 | // but not binding them to descriptor sets. |
| 1344 | if (image_node->createInfo.imageType == VK_IMAGE_TYPE_3D && (iv_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_2D || |
| 1345 | iv_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY)) { |
| 1346 | *error_code = "VUID-VkDescriptorImageInfo-imageView-00343"; |
| 1347 | *error_msg = "ImageView must not be a 2D or 2DArray view of a 3D image"; |
| 1348 | return false; |
| 1349 | } |
| 1350 | |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1351 | // TODO : The various image aspect and format checks here are based on general spec language in 11.5 Image Views section under |
| 1352 | // vkCreateImageView(). What's the best way to create unique id for these cases? |
Mark Lobodzinski | 3fc3d75 | 2019-06-24 14:22:46 -0600 | [diff] [blame] | 1353 | *error_code = "UNASSIGNED-CoreValidation-DrawState-InvalidImageView"; |
Dave Houlton | 1d2022c | 2017-03-29 11:43:58 -0600 | [diff] [blame] | 1354 | bool ds = FormatIsDepthOrStencil(format); |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1355 | switch (image_layout) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1356 | case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: |
| 1357 | // Only Color bit must be set |
| 1358 | if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) { |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1359 | std::stringstream error_str; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 1360 | error_str |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1361 | << "ImageView (" << report_data->FormatHandle(image_view).c_str() |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 1362 | << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but does not have VK_IMAGE_ASPECT_COLOR_BIT set."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1363 | *error_msg = error_str.str(); |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1364 | return false; |
| 1365 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1366 | // format must NOT be DS |
| 1367 | if (ds) { |
| 1368 | std::stringstream error_str; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1369 | error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1370 | << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but the image format is " |
| 1371 | << string_VkFormat(format) << " which is not a color format."; |
| 1372 | *error_msg = error_str.str(); |
| 1373 | return false; |
| 1374 | } |
| 1375 | break; |
| 1376 | case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: |
| 1377 | case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: |
| 1378 | // Depth or stencil bit must be set, but both must NOT be set |
Tobin Ehlis | bbf3f91 | 2016-06-15 13:03:58 -0600 | [diff] [blame] | 1379 | if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) { |
| 1380 | if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) { |
| 1381 | // both must NOT be set |
| 1382 | std::stringstream error_str; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1383 | error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 1384 | << ") has both STENCIL and DEPTH aspects set"; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1385 | *error_msg = error_str.str(); |
Tobin Ehlis | bbf3f91 | 2016-06-15 13:03:58 -0600 | [diff] [blame] | 1386 | return false; |
| 1387 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1388 | } else if (!(aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT)) { |
| 1389 | // Neither was set |
| 1390 | std::stringstream error_str; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1391 | error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") has layout " |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 1392 | << string_VkImageLayout(image_layout) << " but does not have STENCIL or DEPTH aspects set"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1393 | *error_msg = error_str.str(); |
| 1394 | return false; |
Tobin Ehlis | bbf3f91 | 2016-06-15 13:03:58 -0600 | [diff] [blame] | 1395 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1396 | // format must be DS |
| 1397 | if (!ds) { |
| 1398 | std::stringstream error_str; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1399 | error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") has layout " |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 1400 | << string_VkImageLayout(image_layout) << " but the image format is " << string_VkFormat(format) |
| 1401 | << " which is not a depth/stencil format."; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1402 | *error_msg = error_str.str(); |
| 1403 | return false; |
| 1404 | } |
| 1405 | break; |
| 1406 | default: |
| 1407 | // For other layouts, if the source is a depth/stencil image, the DEPTH and STENCIL aspect bits must not both be set |
| 1408 | if (ds) { |
| 1409 | if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) { |
| 1410 | if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) { |
| 1411 | // both must NOT be set |
| 1412 | std::stringstream error_str; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1413 | error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") has layout " |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 1414 | << string_VkImageLayout(image_layout) << " and is using depth/stencil image of format " |
| 1415 | << string_VkFormat(format) |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1416 | << " but it has both STENCIL and DEPTH aspects set, which is illegal. When using a depth/stencil " |
| 1417 | "image in a descriptor set, please only set either VK_IMAGE_ASPECT_DEPTH_BIT or " |
| 1418 | "VK_IMAGE_ASPECT_STENCIL_BIT depending on whether it will be used for depth reads or stencil " |
| 1419 | "reads respectively."; |
Mark Lobodzinski | 4d05d7a | 2019-06-25 09:12:06 -0600 | [diff] [blame] | 1420 | *error_code = "VUID-VkDescriptorImageInfo-imageView-01976"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1421 | *error_msg = error_str.str(); |
| 1422 | return false; |
| 1423 | } |
| 1424 | } |
| 1425 | } |
| 1426 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1427 | } |
| 1428 | // Now validate that usage flags are correctly set for given type of update |
Tobin Ehlis | fb4cf71 | 2016-10-10 14:02:48 -0600 | [diff] [blame] | 1429 | // As we're switching per-type, if any type has specific layout requirements, check those here as well |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1430 | // TODO : The various image usage bit requirements are in general spec language for VkImageUsageFlags bit block in 11.3 Images |
| 1431 | // under vkCreateImage() |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 1432 | // TODO : Need to also validate case "VUID-VkWriteDescriptorSet-descriptorType-00336" where STORAGE_IMAGE & INPUT_ATTACH types |
| 1433 | // must have been created with identity swizzle |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 1434 | const char *error_usage_bit = nullptr; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1435 | switch (type) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1436 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
| 1437 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: { |
| 1438 | if (!(usage & VK_IMAGE_USAGE_SAMPLED_BIT)) { |
| 1439 | error_usage_bit = "VK_IMAGE_USAGE_SAMPLED_BIT"; |
| 1440 | } |
| 1441 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1442 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1443 | case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: { |
| 1444 | if (!(usage & VK_IMAGE_USAGE_STORAGE_BIT)) { |
| 1445 | error_usage_bit = "VK_IMAGE_USAGE_STORAGE_BIT"; |
| 1446 | } else if (VK_IMAGE_LAYOUT_GENERAL != image_layout) { |
| 1447 | std::stringstream error_str; |
Tobin Ehlis | bb03e5f | 2017-05-11 08:52:51 -0600 | [diff] [blame] | 1448 | // TODO : Need to create custom enum error codes for these cases |
| 1449 | if (image_node->shared_presentable) { |
| 1450 | if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR != image_layout) { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1451 | error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 1452 | << ") of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type with a front-buffered image is being updated with " |
| 1453 | "layout " |
| 1454 | << string_VkImageLayout(image_layout) |
| 1455 | << " but according to spec section 13.1 Descriptor Types, 'Front-buffered images that report " |
| 1456 | "support for VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT must be in the " |
| 1457 | "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR layout.'"; |
Tobin Ehlis | bb03e5f | 2017-05-11 08:52:51 -0600 | [diff] [blame] | 1458 | *error_msg = error_str.str(); |
| 1459 | return false; |
| 1460 | } |
| 1461 | } else if (VK_IMAGE_LAYOUT_GENERAL != image_layout) { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1462 | error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 1463 | << ") of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type is being updated with layout " |
| 1464 | << string_VkImageLayout(image_layout) |
| 1465 | << " but according to spec section 13.1 Descriptor Types, 'Load and store operations on storage " |
| 1466 | "images can only be done on images in VK_IMAGE_LAYOUT_GENERAL layout.'"; |
Tobin Ehlis | bb03e5f | 2017-05-11 08:52:51 -0600 | [diff] [blame] | 1467 | *error_msg = error_str.str(); |
| 1468 | return false; |
| 1469 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1470 | } |
| 1471 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1472 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1473 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: { |
| 1474 | if (!(usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) { |
| 1475 | error_usage_bit = "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT"; |
| 1476 | } |
| 1477 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1478 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1479 | default: |
| 1480 | break; |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1481 | } |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 1482 | if (error_usage_bit) { |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1483 | std::stringstream error_str; |
Mark Lobodzinski | 54a67c4 | 2019-06-24 14:35:21 -0600 | [diff] [blame] | 1484 | error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") with usage mask " << std::hex |
| 1485 | << std::showbase << usage << " being used for a descriptor update of type " << string_VkDescriptorType(type) |
| 1486 | << " does not have " << error_usage_bit << " set."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1487 | *error_msg = error_str.str(); |
Tobin Ehlis | 1809f91 | 2016-05-25 09:24:36 -0600 | [diff] [blame] | 1488 | return false; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1489 | } |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 1490 | |
| 1491 | if ((type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) || (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)) { |
| 1492 | // Test that the layout is compatible with the descriptorType for the two sampled image types |
| 1493 | const static std::array<VkImageLayout, 3> valid_layouts = { |
Jeremy Hayes | d0549f6 | 2019-06-05 10:15:36 -0600 | [diff] [blame] | 1494 | {VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL}}; |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 1495 | |
| 1496 | struct ExtensionLayout { |
| 1497 | VkImageLayout layout; |
| 1498 | bool DeviceExtensions::*extension; |
| 1499 | }; |
| 1500 | |
| 1501 | const static std::array<ExtensionLayout, 3> extended_layouts{ |
| 1502 | {// Note double brace req'd for aggregate initialization |
| 1503 | {VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, &DeviceExtensions::vk_khr_shared_presentable_image}, |
| 1504 | {VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, &DeviceExtensions::vk_khr_maintenance2}, |
| 1505 | {VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, &DeviceExtensions::vk_khr_maintenance2}}}; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1506 | auto is_layout = [image_layout, this](const ExtensionLayout &ext_layout) { |
| 1507 | return device_extensions.*(ext_layout.extension) && (ext_layout.layout == image_layout); |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 1508 | }; |
| 1509 | |
| 1510 | bool valid_layout = (std::find(valid_layouts.cbegin(), valid_layouts.cend(), image_layout) != valid_layouts.cend()) || |
| 1511 | std::any_of(extended_layouts.cbegin(), extended_layouts.cend(), is_layout); |
| 1512 | |
| 1513 | if (!valid_layout) { |
| 1514 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-01403"; |
| 1515 | std::stringstream error_str; |
| 1516 | error_str << "Descriptor update with descriptorType " << string_VkDescriptorType(type) |
Mark Lobodzinski | 74eddba | 2019-06-21 14:16:33 -0600 | [diff] [blame] | 1517 | << " is being updated with invalid imageLayout " << string_VkImageLayout(image_layout) << " for image " |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1518 | << report_data->FormatHandle(image).c_str() << " in imageView " |
| 1519 | << report_data->FormatHandle(image_view).c_str() |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 1520 | << ". Allowed layouts are: VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, " |
| 1521 | << "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL"; |
| 1522 | for (auto &ext_layout : extended_layouts) { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1523 | if (device_extensions.*(ext_layout.extension)) { |
John Zulauf | f4c0788 | 2019-01-24 14:03:36 -0700 | [diff] [blame] | 1524 | error_str << ", " << string_VkImageLayout(ext_layout.layout); |
| 1525 | } |
| 1526 | } |
| 1527 | *error_msg = error_str.str(); |
| 1528 | return false; |
| 1529 | } |
| 1530 | } |
| 1531 | |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1532 | return true; |
| 1533 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1534 | |
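| | // Write update for a plain sampler descriptor: record the sampler handle from pImageInfo[index] |
| | // (immutable samplers are never overwritten) and mark the descriptor as updated. |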
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1535 | void cvdescriptorset::SamplerDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) { |
Chris Forbes | fea2c54 | 2018-04-13 09:34:15 -0700 | [diff] [blame] | 1536 | if (!immutable_) { |
| 1537 | sampler_ = update->pImageInfo[index].sampler; |
| 1538 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1539 | updated = true; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1540 | } |
| 1541 | |
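| | // Copy update for a plain sampler descriptor: take the sampler handle from the source descriptor |
| | // unless this binding uses an immutable sampler, then mark the descriptor as updated. |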
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1542 | void cvdescriptorset::SamplerDescriptor::CopyUpdate(const Descriptor *src) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1543 | if (!immutable_) { |
| 1544 | auto update_sampler = static_cast<const SamplerDescriptor *>(src)->sampler_; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1545 | sampler_ = update_sampler; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1546 | } |
| 1547 | updated = true; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1548 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1549 | |
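| | // At draw time, add a command buffer binding for the (non-immutable) sampler so its state is tracked. |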
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1550 | void cvdescriptorset::SamplerDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) { |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 1551 | if (!immutable_) { |
Mark Lobodzinski | f8d1ef9 | 2019-03-06 11:53:27 -0700 | [diff] [blame] | 1552 | auto sampler_state = dev_data->GetSamplerState(sampler_); |
Mark Lobodzinski | b56bbb9 | 2019-02-18 11:49:59 -0700 | [diff] [blame] | 1553 | if (sampler_state) dev_data->AddCommandBufferBindingSampler(cb_node, sampler_state); |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 1554 | } |
| 1555 | } |
| 1556 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1557 | cvdescriptorset::ImageSamplerDescriptor::ImageSamplerDescriptor(const VkSampler *immut) |
Chris Forbes | 9f34085 | 2017-05-09 08:51:38 -0700 | [diff] [blame] | 1558 | : sampler_(VK_NULL_HANDLE), immutable_(false), image_view_(VK_NULL_HANDLE), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1559 | updated = false; |
| 1560 | descriptor_class = ImageSampler; |
| 1561 | if (immut) { |
| 1562 | sampler_ = *immut; |
| 1563 | immutable_ = true; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1564 | } |
| 1565 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1566 | |
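| | // Write update for a combined image sampler descriptor: record the sampler (unless immutable) plus the |
| | // image view and image layout from pImageInfo[index]. |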
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1567 | void cvdescriptorset::ImageSamplerDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1568 | updated = true; |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1569 | const auto &image_info = update->pImageInfo[index]; |
Chris Forbes | fea2c54 | 2018-04-13 09:34:15 -0700 | [diff] [blame] | 1570 | if (!immutable_) { |
| 1571 | sampler_ = image_info.sampler; |
| 1572 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1573 | image_view_ = image_info.imageView; |
| 1574 | image_layout_ = image_info.imageLayout; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1575 | } |
| 1576 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1577 | void cvdescriptorset::ImageSamplerDescriptor::CopyUpdate(const Descriptor *src) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1578 | if (!immutable_) { |
| 1579 | auto update_sampler = static_cast<const ImageSamplerDescriptor *>(src)->sampler_; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1580 | sampler_ = update_sampler; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1581 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1582 | auto image_view = static_cast<const ImageSamplerDescriptor *>(src)->image_view_; |
| 1583 | auto image_layout = static_cast<const ImageSamplerDescriptor *>(src)->image_layout_; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1584 | updated = true; |
| 1585 | image_view_ = image_view; |
| 1586 | image_layout_ = image_layout; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1587 | } |
| 1588 | |
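| | // At draw time, add command buffer bindings for any non-immutable sampler and for the image view, |
| | // and report the descriptor's image layout through the set-initial-layout callback. |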
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1589 | void cvdescriptorset::ImageSamplerDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) { |
Tobin Ehlis | 81e4637 | 2016-08-17 13:33:44 -0600 | [diff] [blame] | 1590 | // First add binding for any non-immutable sampler |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 1591 | if (!immutable_) { |
Mark Lobodzinski | f8d1ef9 | 2019-03-06 11:53:27 -0700 | [diff] [blame] | 1592 | auto sampler_state = dev_data->GetSamplerState(sampler_); |
Mark Lobodzinski | b56bbb9 | 2019-02-18 11:49:59 -0700 | [diff] [blame] | 1593 | if (sampler_state) dev_data->AddCommandBufferBindingSampler(cb_node, sampler_state); |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 1594 | } |
Tobin Ehlis | 81e4637 | 2016-08-17 13:33:44 -0600 | [diff] [blame] | 1595 | // Add binding for image |
Mark Lobodzinski | a3a230b | 2019-03-06 15:35:13 -0700 | [diff] [blame] | 1596 | auto iv_state = dev_data->GetImageViewState(image_view_); |
Tobin Ehlis | 8b26a38 | 2016-09-14 08:02:49 -0600 | [diff] [blame] | 1597 | if (iv_state) { |
Mark Lobodzinski | fae179e | 2019-03-08 16:47:08 -0700 | [diff] [blame] | 1598 | dev_data->AddCommandBufferBindingImageView(cb_node, iv_state); |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1599 | dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *iv_state, image_layout_); |
Jeff Bolz | 148d94e | 2018-12-13 21:25:56 -0600 | [diff] [blame] | 1600 | } |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 1601 | } |
| 1602 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1603 | cvdescriptorset::ImageDescriptor::ImageDescriptor(const VkDescriptorType type) |
| 1604 | : storage_(false), image_view_(VK_NULL_HANDLE), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1605 | updated = false; |
| 1606 | descriptor_class = Image; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1607 | if (VK_DESCRIPTOR_TYPE_STORAGE_IMAGE == type) storage_ = true; |
Petr Kraus | 13c98a6 | 2017-12-09 00:22:39 +0100 | [diff] [blame] | 1608 | } |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1609 | |
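| | // Write update for an image descriptor: record the image view and image layout from pImageInfo[index]. |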
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1610 | void cvdescriptorset::ImageDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1611 | updated = true; |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1612 | const auto &image_info = update->pImageInfo[index]; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1613 | image_view_ = image_info.imageView; |
| 1614 | image_layout_ = image_info.imageLayout; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1615 | } |
| 1616 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1617 | void cvdescriptorset::ImageDescriptor::CopyUpdate(const Descriptor *src) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1618 | auto image_view = static_cast<const ImageDescriptor *>(src)->image_view_; |
| 1619 | auto image_layout = static_cast<const ImageDescriptor *>(src)->image_layout_; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1620 | updated = true; |
| 1621 | image_view_ = image_view; |
| 1622 | image_layout_ = image_layout; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1623 | } |
| 1624 | |
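| | // At draw time, add a command buffer binding for the image view and report the descriptor's image layout |
| | // through the set-initial-layout callback. |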
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1625 | void cvdescriptorset::ImageDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) { |
Tobin Ehlis | 81e4637 | 2016-08-17 13:33:44 -0600 | [diff] [blame] | 1626 | // Add binding for image |
Mark Lobodzinski | a3a230b | 2019-03-06 15:35:13 -0700 | [diff] [blame] | 1627 | auto iv_state = dev_data->GetImageViewState(image_view_); |
Tobin Ehlis | 8b26a38 | 2016-09-14 08:02:49 -0600 | [diff] [blame] | 1628 | if (iv_state) { |
Mark Lobodzinski | fae179e | 2019-03-08 16:47:08 -0700 | [diff] [blame] | 1629 | dev_data->AddCommandBufferBindingImageView(cb_node, iv_state); |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1630 | dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *iv_state, image_layout_); |
Jeff Bolz | 148d94e | 2018-12-13 21:25:56 -0600 | [diff] [blame] | 1631 | } |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 1632 | } |
| 1633 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1634 | cvdescriptorset::BufferDescriptor::BufferDescriptor(const VkDescriptorType type) |
| 1635 | : storage_(false), dynamic_(false), buffer_(VK_NULL_HANDLE), offset_(0), range_(0) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1636 | updated = false; |
| 1637 | descriptor_class = GeneralBuffer; |
| 1638 | if (VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC == type) { |
| 1639 | dynamic_ = true; |
| 1640 | } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER == type) { |
| 1641 | storage_ = true; |
| 1642 | } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC == type) { |
| 1643 | dynamic_ = true; |
| 1644 | storage_ = true; |
| 1645 | } |
| 1646 | } |
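| | // Write update for a uniform/storage buffer descriptor: record the buffer handle, offset, and range from pBufferInfo[index]. |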
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1647 | void cvdescriptorset::BufferDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1648 | updated = true; |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1649 | const auto &buffer_info = update->pBufferInfo[index]; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1650 | buffer_ = buffer_info.buffer; |
| 1651 | offset_ = buffer_info.offset; |
| 1652 | range_ = buffer_info.range; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1653 | } |
| 1654 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1655 | void cvdescriptorset::BufferDescriptor::CopyUpdate(const Descriptor *src) { |
| 1656 | auto buff_desc = static_cast<const BufferDescriptor *>(src); |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1657 | updated = true; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1658 | buffer_ = buff_desc->buffer_; |
| 1659 | offset_ = buff_desc->offset_; |
| 1660 | range_ = buff_desc->range_; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1661 | } |
| 1662 | |
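| | // At draw time, add a command buffer binding for the buffer's state object. |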
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1663 | void cvdescriptorset::BufferDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) { |
Mark Lobodzinski | 6ed7414 | 2019-03-06 11:35:39 -0700 | [diff] [blame] | 1664 | auto buffer_node = dev_data->GetBufferState(buffer_); |
Mark Lobodzinski | fae179e | 2019-03-08 16:47:08 -0700 | [diff] [blame] | 1665 | if (buffer_node) dev_data->AddCommandBufferBindingBuffer(cb_node, buffer_node); |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 1666 | } |
| 1667 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1668 | cvdescriptorset::TexelDescriptor::TexelDescriptor(const VkDescriptorType type) : buffer_view_(VK_NULL_HANDLE), storage_(false) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1669 | updated = false; |
| 1670 | descriptor_class = TexelBuffer; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1671 | if (VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER == type) storage_ = true; |
Petr Kraus | 13c98a6 | 2017-12-09 00:22:39 +0100 | [diff] [blame] | 1672 | } |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1673 | |
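| | // Write update for a texel buffer descriptor: record the buffer view handle from pTexelBufferView[index]. |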
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1674 | void cvdescriptorset::TexelDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) { |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1675 | updated = true; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1676 | buffer_view_ = update->pTexelBufferView[index]; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1677 | } |
| 1678 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1679 | void cvdescriptorset::TexelDescriptor::CopyUpdate(const Descriptor *src) { |
| 1680 | updated = true; |
| 1681 | buffer_view_ = static_cast<const TexelDescriptor *>(src)->buffer_view_; |
| 1682 | } |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 1683 | |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 1684 | void cvdescriptorset::TexelDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) { |
Mark Lobodzinski | 31aa9b0 | 2019-03-06 11:51:37 -0700 | [diff] [blame] | 1685 | auto bv_state = dev_data->GetBufferViewState(buffer_view_); |
Tobin Ehlis | 8b87246 | 2016-09-14 08:12:08 -0600 | [diff] [blame] | 1686 | if (bv_state) { |
Mark Lobodzinski | fae179e | 2019-03-08 16:47:08 -0700 | [diff] [blame] | 1687 | dev_data->AddCommandBufferBindingBufferView(cb_node, bv_state); |
Tobin Ehlis | 81e4637 | 2016-08-17 13:33:44 -0600 | [diff] [blame] | 1688 | } |
Tobin Ehlis | 8020eea | 2016-08-17 11:10:41 -0600 | [diff] [blame] | 1689 | } |
| 1690 | |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1691 | // This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated |
| 1692 | // sets, and then calls their respective Validate[Write|Copy]Update functions. |
| 1693 | // If an update hits an issue for which the debug report callback returns "true" (meaning the call down the chain |
| 1694 | // should be skipped), then true is returned. |
| 1695 | // If there is no issue with the update, then false is returned. |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 1696 | bool CoreChecks::ValidateUpdateDescriptorSets(uint32_t write_count, const VkWriteDescriptorSet *p_wds, uint32_t copy_count, |
Mark Lobodzinski | b56bbb9 | 2019-02-18 11:49:59 -0700 | [diff] [blame] | 1697 | const VkCopyDescriptorSet *p_cds, const char *func_name) { |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 1698 | bool skip = false; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1699 | // Validate Write updates |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1700 | for (uint32_t i = 0; i < write_count; i++) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1701 | auto dest_set = p_wds[i].dstSet; |
Mark Lobodzinski | fc2f0d3 | 2019-03-06 11:25:39 -0700 | [diff] [blame] | 1702 | auto set_node = GetSetNode(dest_set); |
Tobin Ehlis | 6a72dc7 | 2016-06-01 16:41:17 -0600 | [diff] [blame] | 1703 | if (!set_node) { |
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 1704 | skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, |
| 1705 | HandleToUint64(dest_set), kVUID_Core_DrawState_InvalidDescriptorSet, |
locke-lunarg | 9edc281 | 2019-06-17 23:18:52 -0600 | [diff] [blame] | 1706 | "Cannot call %s on %s that has not been allocated.", func_name, |
Locke | ca0d979 | 2019-03-03 23:48:13 -0700 | [diff] [blame] | 1707 | report_data->FormatHandle(dest_set).c_str()); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1708 | } else { |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 1709 | std::string error_code; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1710 | std::string error_str; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1711 | if (!ValidateWriteUpdate(set_node, &p_wds[i], func_name, &error_code, &error_str)) { |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 1712 | skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, |
locke-lunarg | 9edc281 | 2019-06-17 23:18:52 -0600 | [diff] [blame] | 1713 | HandleToUint64(dest_set), error_code, "%s failed write update validation for %s with error: %s.", |
| 1714 | func_name, report_data->FormatHandle(dest_set).c_str(), error_str.c_str()); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1715 | } |
| 1716 | } |
| 1717 | } |
| 1718 | // Now validate copy updates |
Tobin Ehlis | 56a3094 | 2016-05-19 08:00:00 -0600 | [diff] [blame] | 1719 | for (uint32_t i = 0; i < copy_count; ++i) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1720 | auto dst_set = p_cds[i].dstSet; |
| 1721 | auto src_set = p_cds[i].srcSet; |
Mark Lobodzinski | fc2f0d3 | 2019-03-06 11:25:39 -0700 | [diff] [blame] | 1722 | auto src_node = GetSetNode(src_set); |
| 1723 | auto dst_node = GetSetNode(dst_set); |
Tobin Ehlis | a171275 | 2017-01-04 09:41:47 -0700 | [diff] [blame] | 1724 | // Object_tracker verifies that the src & dst descriptor sets are valid |
| 1725 | assert(src_node); |
| 1726 | assert(dst_node); |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 1727 | std::string error_code; |
Tobin Ehlis | a171275 | 2017-01-04 09:41:47 -0700 | [diff] [blame] | 1728 | std::string error_str; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1729 | if (!ValidateCopyUpdate(&p_cds[i], dst_node, src_node, func_name, &error_code, &error_str)) { |
locke-lunarg | 9edc281 | 2019-06-17 23:18:52 -0600 | [diff] [blame] | 1730 | skip |= |
| 1731 | log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, |
| 1732 | HandleToUint64(dst_set), error_code, "%s failed copy update from %s to %s with error: %s.", func_name, |
| 1733 | report_data->FormatHandle(src_set).c_str(), report_data->FormatHandle(dst_set).c_str(), error_str.c_str()); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1734 | } |
| 1735 | } |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 1736 | return skip; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1737 | } |
| 1738 | // This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated |
| 1739 | // sets, and then calls their respective Perform[Write|Copy]Update functions. |
| 1740 | // Prerequisite : ValidateUpdateDescriptorSets() should be called and return "false" prior to calling PerformUpdateDescriptorSets() |
| 1741 | // with the same set of updates. |
| 1742 | // This is split from the validate code to allow validation prior to calling down the chain, and then update after |
| 1743 | // calling down the chain. |
John Zulauf | e3b35f3 | 2019-06-25 14:21:21 -0600 | [diff] [blame] | 1744 | void cvdescriptorset::PerformUpdateDescriptorSets(ValidationStateTracker *dev_data, uint32_t write_count, |
| 1745 | const VkWriteDescriptorSet *p_wds, uint32_t copy_count, |
| 1746 | const VkCopyDescriptorSet *p_cds) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1747 | // Write updates first |
| 1748 | uint32_t i = 0; |
| 1749 | for (i = 0; i < write_count; ++i) { |
| 1750 | auto dest_set = p_wds[i].dstSet; |
Mark Lobodzinski | fc2f0d3 | 2019-03-06 11:25:39 -0700 | [diff] [blame] | 1751 | auto set_node = dev_data->GetSetNode(dest_set); |
Tobin Ehlis | 6a72dc7 | 2016-06-01 16:41:17 -0600 | [diff] [blame] | 1752 | if (set_node) { |
| 1753 | set_node->PerformWriteUpdate(&p_wds[i]); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1754 | } |
| 1755 | } |
| 1756 | // Now copy updates |
| 1757 | for (i = 0; i < copy_count; ++i) { |
| 1758 | auto dst_set = p_cds[i].dstSet; |
| 1759 | auto src_set = p_cds[i].srcSet; |
Mark Lobodzinski | fc2f0d3 | 2019-03-06 11:25:39 -0700 | [diff] [blame] | 1760 | auto src_node = dev_data->GetSetNode(src_set); |
| 1761 | auto dst_node = dev_data->GetSetNode(dst_set); |
Tobin Ehlis | 6a72dc7 | 2016-06-01 16:41:17 -0600 | [diff] [blame] | 1762 | if (src_node && dst_node) { |
| 1763 | dst_node->PerformCopyUpdate(&p_cds[i], src_node); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 1764 | } |
| 1765 | } |
| 1766 | } |
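| | // Illustrative sketch of the validate-then-record pattern described above (assumed caller code, not an |
| | // actual call site in this file; "core_checks" and "state_tracker" are hypothetical handles to the |
| | // CoreChecks and ValidationStateTracker objects): |
| | //   bool skip = core_checks->ValidateUpdateDescriptorSets(write_count, p_wds, copy_count, p_cds, |
| | //                                                         "vkUpdateDescriptorSets()"); |
| | //   if (!skip) { |
| | //       cvdescriptorset::PerformUpdateDescriptorSets(state_tracker, write_count, p_wds, copy_count, p_cds); |
| | //   } |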
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 1767 | |
John Zulauf | e3b35f3 | 2019-06-25 14:21:21 -0600 | [diff] [blame] | 1768 | cvdescriptorset::DecodedTemplateUpdate::DecodedTemplateUpdate(const ValidationStateTracker *device_data, |
| 1769 | VkDescriptorSet descriptorSet, const TEMPLATE_STATE *template_state, |
| 1770 | const void *pData, VkDescriptorSetLayout push_layout) { |
John Zulauf | b845eb2 | 2018-10-12 11:41:06 -0600 | [diff] [blame] | 1771 | auto const &create_info = template_state->create_info; |
| 1772 | inline_infos.resize(create_info.descriptorUpdateEntryCount); // Make sure we have one if we need it |
| 1773 | desc_writes.reserve(create_info.descriptorUpdateEntryCount); // emplaced, so reserved without initialization |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1774 | VkDescriptorSetLayout effective_dsl = create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET |
| 1775 | ? create_info.descriptorSetLayout |
| 1776 | : push_layout; |
| 1777 | auto layout_obj = GetDescriptorSetLayout(device_data, effective_dsl); |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 1778 | |
| 1779 | // Create a WriteDescriptorSet struct for each template update entry |
| 1780 | for (uint32_t i = 0; i < create_info.descriptorUpdateEntryCount; i++) { |
| 1781 | auto binding_count = layout_obj->GetDescriptorCountFromBinding(create_info.pDescriptorUpdateEntries[i].dstBinding); |
| 1782 | auto binding_being_updated = create_info.pDescriptorUpdateEntries[i].dstBinding; |
| 1783 | auto dst_array_element = create_info.pDescriptorUpdateEntries[i].dstArrayElement; |
| 1784 | |
John Zulauf | b6d7120 | 2017-12-22 16:47:09 -0700 | [diff] [blame] | 1785 | desc_writes.reserve(desc_writes.size() + create_info.pDescriptorUpdateEntries[i].descriptorCount); |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 1786 | for (uint32_t j = 0; j < create_info.pDescriptorUpdateEntries[i].descriptorCount; j++) { |
| 1787 | desc_writes.emplace_back(); |
| 1788 | auto &write_entry = desc_writes.back(); |
| 1789 | |
| 1790 | size_t offset = create_info.pDescriptorUpdateEntries[i].offset + j * create_info.pDescriptorUpdateEntries[i].stride; |
| 1791 | char *update_entry = (char *)(pData) + offset; |
| 1792 | |
| 1793 | if (dst_array_element >= binding_count) { |
| 1794 | dst_array_element = 0; |
Mark Lobodzinski | 4aa479d | 2017-03-10 09:14:00 -0700 | [diff] [blame] | 1795 | binding_being_updated = layout_obj->GetNextValidBinding(binding_being_updated); |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 1796 | } |
| 1797 | |
| 1798 | write_entry.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; |
| 1799 | write_entry.pNext = NULL; |
| 1800 | write_entry.dstSet = descriptorSet; |
| 1801 | write_entry.dstBinding = binding_being_updated; |
| 1802 | write_entry.dstArrayElement = dst_array_element; |
| 1803 | write_entry.descriptorCount = 1; |
| 1804 | write_entry.descriptorType = create_info.pDescriptorUpdateEntries[i].descriptorType; |
| 1805 | |
| 1806 | switch (create_info.pDescriptorUpdateEntries[i].descriptorType) { |
| 1807 | case VK_DESCRIPTOR_TYPE_SAMPLER: |
| 1808 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: |
| 1809 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
| 1810 | case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: |
| 1811 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: |
| 1812 | write_entry.pImageInfo = reinterpret_cast<VkDescriptorImageInfo *>(update_entry); |
| 1813 | break; |
| 1814 | |
| 1815 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: |
| 1816 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: |
| 1817 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: |
| 1818 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: |
| 1819 | write_entry.pBufferInfo = reinterpret_cast<VkDescriptorBufferInfo *>(update_entry); |
| 1820 | break; |
| 1821 | |
| 1822 | case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: |
| 1823 | case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: |
| 1824 | write_entry.pTexelBufferView = reinterpret_cast<VkBufferView *>(update_entry); |
| 1825 | break; |
Dave Houlton | 142c4cb | 2018-10-17 15:04:41 -0600 | [diff] [blame] | 1826 | case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: { |
| 1827 | VkWriteDescriptorSetInlineUniformBlockEXT *inline_info = &inline_infos[i]; |
| 1828 | inline_info->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT; |
| 1829 | inline_info->pNext = nullptr; |
| 1830 | inline_info->dataSize = create_info.pDescriptorUpdateEntries[i].descriptorCount; |
| 1831 | inline_info->pData = update_entry; |
| 1832 | write_entry.pNext = inline_info; |
Ricardo Garcia | fee1573 | 2019-05-28 11:13:31 +0200 | [diff] [blame] | 1833 | // descriptorCount must match the dataSize member of the VkWriteDescriptorSetInlineUniformBlockEXT structure |
| 1834 | write_entry.descriptorCount = inline_info->dataSize; |
Dave Houlton | 142c4cb | 2018-10-17 15:04:41 -0600 | [diff] [blame] | 1835 | // skip the rest of the array, they just represent bytes in the update |
| 1836 | j = create_info.pDescriptorUpdateEntries[i].descriptorCount; |
| 1837 | break; |
| 1838 | } |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 1839 | default: |
| 1840 | assert(0); |
| 1841 | break; |
| 1842 | } |
| 1843 | dst_array_element++; |
| 1844 | } |
| 1845 | } |
John Zulauf | b845eb2 | 2018-10-12 11:41:06 -0600 | [diff] [blame] | 1846 | } |
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 1847 | // These helper functions carry out the validate and record steps for descriptor updates performed via update templates. They decode |
| 1848 | // the templatized data and leverage the non-template UpdateDescriptor helper functions. |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 1849 | bool CoreChecks::ValidateUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet, const TEMPLATE_STATE *template_state, |
| 1850 | const void *pData) { |
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 1851 | // Translate the templated update into a normal update for validation... |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 1852 | cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData); |
| 1853 | return ValidateUpdateDescriptorSets(static_cast<uint32_t>(decoded_update.desc_writes.size()), decoded_update.desc_writes.data(), |
| 1854 | 0, NULL, "vkUpdateDescriptorSetWithTemplate()"); |
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 1855 | } |
John Zulauf | b845eb2 | 2018-10-12 11:41:06 -0600 | [diff] [blame] | 1856 | |
John Zulauf | e3b35f3 | 2019-06-25 14:21:21 -0600 | [diff] [blame] | 1857 | void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet, |
| 1858 | const TEMPLATE_STATE *template_state, const void *pData) { |
John Zulauf | b45fdc3 | 2018-10-12 15:14:17 -0600 | [diff] [blame] | 1859 | // Translate the templated update into a normal update, then perform it on the state tracker |
Mark Lobodzinski | 254c851 | 2019-03-09 12:21:15 -0700 | [diff] [blame] | 1860 | cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData); |
| 1861 | cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()), |
Mark Lobodzinski | b56bbb9 | 2019-02-18 11:49:59 -0700 | [diff] [blame] | 1862 | decoded_update.desc_writes.data(), 0, NULL); |
Mark Lobodzinski | 3d63a04 | 2017-03-09 16:24:13 -0700 | [diff] [blame] | 1863 | } |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 1864 | |
| 1865 | std::string cvdescriptorset::DescriptorSet::StringifySetAndLayout() const { |
| 1866 | std::string out; |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 1867 | auto layout_handle = p_layout_->GetDescriptorSetLayout(); |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 1868 | if (IsPushDescriptor()) { |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 1869 | string_sprintf(&out, "Push Descriptors defined with VkDescriptorSetLayout %s", |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1870 | state_data_->report_data->FormatHandle(layout_handle).c_str()); |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 1871 | } else { |
Mark Lobodzinski | 6b04292 | 2019-06-21 14:46:42 -0600 | [diff] [blame] | 1872 | string_sprintf(&out, "VkDescriptorSet %s allocated with VkDescriptorSetLayout %s", |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1873 | state_data_->report_data->FormatHandle(set_).c_str(), |
| 1874 | state_data_->report_data->FormatHandle(layout_handle).c_str()); |
John Zulauf | 4e7bcb5 | 2018-11-02 10:46:30 -0600 | [diff] [blame] | 1875 | } |
| 1876 | return out; |
| 1877 | }; |
| 1878 | |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1879 | // Loop through the write updates to validate for a push descriptor set, ignoring dstSet |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1880 | bool CoreChecks::ValidatePushDescriptorsUpdate(const DescriptorSet *push_set, uint32_t write_count, |
John Zulauf | bd9b341 | 2019-08-22 17:16:11 -0600 | [diff] [blame] | 1881 | const VkWriteDescriptorSet *p_wds, const char *func_name) const { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1882 | assert(push_set->IsPushDescriptor()); |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1883 | bool skip = false; |
| 1884 | for (uint32_t i = 0; i < write_count; i++) { |
| 1885 | std::string error_code; |
| 1886 | std::string error_str; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1887 | if (!ValidateWriteUpdate(push_set, &p_wds[i], func_name, &error_code, &error_str)) { |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1888 | skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1889 | HandleToUint64(push_set->GetDescriptorSetLayout()), error_code, "%s failed update validation: %s.", |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 1890 | func_name, error_str.c_str()); |
| 1891 | } |
| 1892 | } |
| 1893 | return skip; |
| 1894 | } |
| 1895 | |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 1896 | // For the given buffer, verify that its creation parameters are appropriate for the given type |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1897 | // If there's an error, update the error_msg string with details and return false, else return true |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1898 | bool cvdescriptorset::ValidateBufferUsage(BUFFER_STATE const *buffer_node, VkDescriptorType type, std::string *error_code, |
| 1899 | std::string *error_msg) { |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 1900 | // Verify that usage bits are set correctly for the given type |
Tobin Ehlis | 94bc5d2 | 2016-06-02 07:46:52 -0600 | [diff] [blame] | 1901 | auto usage = buffer_node->createInfo.usage; |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 1902 | const char *error_usage_bit = nullptr; |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 1903 | switch (type) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1904 | case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: |
| 1905 | if (!(usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1906 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00334"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1907 | error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT"; |
| 1908 | } |
| 1909 | break; |
| 1910 | case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: |
| 1911 | if (!(usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1912 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00335"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1913 | error_usage_bit = "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT"; |
| 1914 | } |
| 1915 | break; |
| 1916 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: |
| 1917 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: |
| 1918 | if (!(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1919 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00330"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1920 | error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT"; |
| 1921 | } |
| 1922 | break; |
| 1923 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: |
| 1924 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: |
| 1925 | if (!(usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1926 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00331"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 1927 | error_usage_bit = "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT"; |
| 1928 | } |
| 1929 | break; |
| 1930 | default: |
| 1931 | break; |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 1932 | } |
Jeff Bolz | 6d3beaa | 2019-02-09 21:00:05 -0600 | [diff] [blame] | 1933 | if (error_usage_bit) { |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 1934 | std::stringstream error_str; |
Mark Lobodzinski | 54a67c4 | 2019-06-24 14:35:21 -0600 | [diff] [blame] | 1935 | error_str << "Buffer (" << buffer_node->buffer << ") with usage mask " << std::hex << std::showbase << usage |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 1936 | << " being used for a descriptor update of type " << string_VkDescriptorType(type) << " does not have " |
| 1937 | << error_usage_bit << " set."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1938 | *error_msg = error_str.str(); |
Tobin Ehlis | 6bd2b98 | 2016-05-24 12:33:42 -0600 | [diff] [blame] | 1939 | return false; |
| 1940 | } |
| 1941 | return true; |
| 1942 | } |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 1943 | // For buffer descriptor updates, verify the buffer usage and VkDescriptorBufferInfo struct which includes: |
| 1944 | // 1. buffer is valid |
| 1945 | // 2. buffer was created with correct usage flags |
| 1946 | // 3. offset is less than buffer size |
| 1947 | // 4. range is either VK_WHOLE_SIZE or falls in (0, (buffer size - offset)] |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 1948 | // 5. range and offset are within the device's limits |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1949 | // If there's an error, update the error_msg string with details and return false, else return true |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1950 | bool CoreChecks::ValidateBufferUpdate(VkDescriptorBufferInfo const *buffer_info, VkDescriptorType type, const char *func_name, |
John Zulauf | bd9b341 | 2019-08-22 17:16:11 -0600 | [diff] [blame] | 1951 | std::string *error_code, std::string *error_msg) const { |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 1952 | // First make sure that buffer is valid |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1953 | auto buffer_node = GetBufferState(buffer_info->buffer); |
Tobin Ehlis | fa8b618 | 2016-12-22 13:40:45 -0700 | [diff] [blame] | 1954 | // Any invalid buffer should already be caught by object_tracker |
| 1955 | assert(buffer_node); |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1956 | if (ValidateMemoryIsBoundToBuffer(buffer_node, func_name, "VUID-VkWriteDescriptorSet-descriptorType-00329")) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1957 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00329"; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1958 | *error_msg = "No memory bound to buffer."; |
Tobin Ehlis | 8128096 | 2016-07-20 14:04:20 -0600 | [diff] [blame] | 1959 | return false; |
Tobin Ehlis | fed999f | 2016-09-21 15:09:45 -0600 | [diff] [blame] | 1960 | } |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 1961 | // Verify usage bits |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1962 | if (!cvdescriptorset::ValidateBufferUsage(buffer_node, type, error_code, error_msg)) { |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1963 | // error_msg will have been updated by ValidateBufferUsage() |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 1964 | return false; |
| 1965 | } |
| 1966 | // offset must be less than buffer size |
Jeremy Hayes | d1a6a82 | 2017-03-09 14:39:45 -0700 | [diff] [blame] | 1967 | if (buffer_info->offset >= buffer_node->createInfo.size) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1968 | *error_code = "VUID-VkDescriptorBufferInfo-offset-00340"; |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 1969 | std::stringstream error_str; |
Jeremy Hayes | d1a6a82 | 2017-03-09 14:39:45 -0700 | [diff] [blame] | 1970 | error_str << "VkDescriptorBufferInfo offset of " << buffer_info->offset << " is greater than or equal to buffer " |
| 1971 | << buffer_node->buffer << " size of " << buffer_node->createInfo.size; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1972 | *error_msg = error_str.str(); |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 1973 | return false; |
| 1974 | } |
| 1975 | if (buffer_info->range != VK_WHOLE_SIZE) { |
| 1976 | // Range must be VK_WHOLE_SIZE or > 0 |
| 1977 | if (!buffer_info->range) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1978 | *error_code = "VUID-VkDescriptorBufferInfo-range-00341"; |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 1979 | std::stringstream error_str; |
| 1980 | error_str << "VkDescriptorBufferInfo range is not VK_WHOLE_SIZE and is zero, which is not allowed."; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1981 | *error_msg = error_str.str(); |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 1982 | return false; |
| 1983 | } |
| 1984 | // Range must be VK_WHOLE_SIZE or <= (buffer size - offset) |
| 1985 | if (buffer_info->range > (buffer_node->createInfo.size - buffer_info->offset)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1986 | *error_code = "VUID-VkDescriptorBufferInfo-range-00342"; |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 1987 | std::stringstream error_str; |
| 1988 | error_str << "VkDescriptorBufferInfo range is " << buffer_info->range << " which is greater than buffer size (" |
| 1989 | << buffer_node->createInfo.size << ") minus requested offset of " << buffer_info->offset; |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 1990 | *error_msg = error_str.str(); |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 1991 | return false; |
| 1992 | } |
| 1993 | } |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 1994 | // Check buffer update sizes against device limits |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 1995 | const auto &limits = phys_dev_props.limits; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 1996 | if (VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type || VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC == type) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 1997 | auto max_ub_range = limits.maxUniformBufferRange; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 1998 | if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_ub_range) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 1999 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00332"; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 2000 | std::stringstream error_str; |
| 2001 | error_str << "VkDescriptorBufferInfo range is " << buffer_info->range |
| 2002 | << " which is greater than this device's maxUniformBufferRange (" << max_ub_range << ")"; |
| 2003 | *error_msg = error_str.str(); |
| 2004 | return false; |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 2005 | } else if (buffer_info->range == VK_WHOLE_SIZE && (buffer_node->createInfo.size - buffer_info->offset) > max_ub_range) { |
| 2006 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00332"; |
| 2007 | std::stringstream error_str; |
Peter Kohaut | 18f413d | 2018-07-16 13:15:42 +0200 | [diff] [blame] | 2008 | error_str << "VkDescriptorBufferInfo range is VK_WHOLE_SIZE but effective range " |
| 2009 | << "(" << (buffer_node->createInfo.size - buffer_info->offset) << ") is greater than this device's " |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 2010 | << "maxUniformBufferRange (" << max_ub_range << ")"; |
| 2011 | *error_msg = error_str.str(); |
| 2012 | return false; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 2013 | } |
| 2014 | } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER == type || VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC == type) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 2015 | auto max_sb_range = limits.maxStorageBufferRange; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 2016 | if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_sb_range) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 2017 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00333"; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 2018 | std::stringstream error_str; |
| 2019 | error_str << "VkDescriptorBufferInfo range is " << buffer_info->range |
| 2020 | << " which is greater than this device's maxStorageBufferRange (" << max_sb_range << ")"; |
| 2021 | *error_msg = error_str.str(); |
| 2022 | return false; |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 2023 | } else if (buffer_info->range == VK_WHOLE_SIZE && (buffer_node->createInfo.size - buffer_info->offset) > max_sb_range) { |
| 2024 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00333"; |
| 2025 | std::stringstream error_str; |
Peter Kohaut | 18f413d | 2018-07-16 13:15:42 +0200 | [diff] [blame] | 2026 | error_str << "VkDescriptorBufferInfo range is VK_WHOLE_SIZE but effective range " |
| 2027 | << "(" << (buffer_node->createInfo.size - buffer_info->offset) << ") is greater than this device's " |
Peter Kohaut | 2794a29 | 2018-07-13 11:13:47 +0200 | [diff] [blame] | 2028 | << "maxStorageBufferRange (" << max_sb_range << ")"; |
| 2029 | *error_msg = error_str.str(); |
| 2030 | return false; |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 2031 | } |
| 2032 | } |
Tobin Ehlis | 3d38f08 | 2016-07-01 17:36:48 -0600 | [diff] [blame] | 2033 | return true; |
| 2034 | } |
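// Illustrative sketch (not from this file): the kind of application-side write that the range checks
// above would reject. The handles (device, buffer, ds) and the 128 KiB range are hypothetical; a range
// larger than VkPhysicalDeviceLimits::maxUniformBufferRange triggers VUID-VkWriteDescriptorSet-descriptorType-00332.
//
//     VkDescriptorBufferInfo buffer_info = {};
//     buffer_info.buffer = buffer;     // assumed valid VkBuffer
//     buffer_info.offset = 0;
//     buffer_info.range = 128 * 1024;  // exceeds a typical 64 KiB maxUniformBufferRange
//
//     VkWriteDescriptorSet write = {};
//     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
//     write.dstSet = ds;               // assumed valid VkDescriptorSet
//     write.dstBinding = 0;
//     write.descriptorCount = 1;
//     write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//     write.pBufferInfo = &buffer_info;
//     vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);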
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2035 | // Verify that the contents of the update are ok, but don't perform actual update |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2036 | bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, const DescriptorSet *src_set, VkDescriptorType type, |
| 2037 | uint32_t index, const char *func_name, std::string *error_code, std::string *error_msg) { |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2038 | // Note : Repurposing some Write update error codes here as specific details aren't called out for copy updates like they are |
| 2039 | // for write updates |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2040 | using DescriptorClass = cvdescriptorset::DescriptorClass; |
| 2041 | using BufferDescriptor = cvdescriptorset::BufferDescriptor; |
| 2042 | using ImageDescriptor = cvdescriptorset::ImageDescriptor; |
| 2043 | using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor; |
| 2044 | using SamplerDescriptor = cvdescriptorset::SamplerDescriptor; |
| 2045 | using TexelDescriptor = cvdescriptorset::TexelDescriptor; |
| 2046 | |
| 2047 | auto device_data = this; |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 2048 | switch (src_set->GetDescriptorFromGlobalIndex(index)->descriptor_class) { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2049 | case DescriptorClass::PlainSampler: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2050 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 2051 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 2052 | if (!src_desc->updated) continue; |
| 2053 | if (!src_desc->IsImmutableSampler()) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 2054 | auto update_sampler = static_cast<const SamplerDescriptor *>(src_desc)->GetSampler(); |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2055 | if (!ValidateSampler(update_sampler)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 2056 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2057 | std::stringstream error_str; |
| 2058 | error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << "."; |
| 2059 | *error_msg = error_str.str(); |
| 2060 | return false; |
| 2061 | } |
| 2062 | } else { |
| 2063 | // TODO : Warn here |
| 2064 | } |
| 2065 | } |
| 2066 | break; |
| 2067 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2068 | case DescriptorClass::ImageSampler: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2069 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 2070 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 2071 | if (!src_desc->updated) continue; |
| 2072 | auto img_samp_desc = static_cast<const ImageSamplerDescriptor *>(src_desc); |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2073 | // First validate sampler |
| 2074 | if (!img_samp_desc->IsImmutableSampler()) { |
| 2075 | auto update_sampler = img_samp_desc->GetSampler(); |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2076 | if (!ValidateSampler(update_sampler)) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 2077 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2078 | std::stringstream error_str; |
| 2079 | error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << "."; |
| 2080 | *error_msg = error_str.str(); |
| 2081 | return false; |
| 2082 | } |
| 2083 | } else { |
| 2084 | // TODO : Warn here |
| 2085 | } |
| 2086 | // Validate image |
| 2087 | auto image_view = img_samp_desc->GetImageView(); |
| 2088 | auto image_layout = img_samp_desc->GetImageLayout(); |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2089 | if (!ValidateImageUpdate(image_view, image_layout, type, func_name, error_code, error_msg)) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2090 | std::stringstream error_str; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2091 | error_str << "Attempted copy update to combined image sampler descriptor failed due to: " << error_msg->c_str(); |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2092 | *error_msg = error_str.str(); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2093 | return false; |
| 2094 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2095 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2096 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2097 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2098 | case DescriptorClass::Image: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2099 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 2100 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 2101 | if (!src_desc->updated) continue; |
| 2102 | auto img_desc = static_cast<const ImageDescriptor *>(src_desc); |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2103 | auto image_view = img_desc->GetImageView(); |
| 2104 | auto image_layout = img_desc->GetImageLayout(); |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2105 | if (!ValidateImageUpdate(image_view, image_layout, type, func_name, error_code, error_msg)) { |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2106 | std::stringstream error_str; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2107 | error_str << "Attempted copy update to image descriptor failed due to: " << error_msg->c_str(); |
Tobin Ehlis | 75f04ec | 2016-10-06 17:43:11 -0600 | [diff] [blame] | 2108 | *error_msg = error_str.str(); |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2109 | return false; |
| 2110 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2111 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2112 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2113 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2114 | case DescriptorClass::TexelBuffer: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2115 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 2116 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 2117 | if (!src_desc->updated) continue; |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 2118 | auto buffer_view = static_cast<const TexelDescriptor *>(src_desc)->GetBufferView(); |
| 2119 | auto bv_state = device_data->GetBufferViewState(buffer_view); |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2120 | if (!bv_state) { |
Dave Houlton | 00c154e | 2018-05-24 13:20:50 -0600 | [diff] [blame] | 2121 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00323"; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2122 | std::stringstream error_str; |
| 2123 | error_str << "Attempted copy update to texel buffer descriptor with invalid buffer view: " << buffer_view; |
| 2124 | *error_msg = error_str.str(); |
| 2125 | return false; |
| 2126 | } |
| 2127 | auto buffer = bv_state->create_info.buffer; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2128 | if (!cvdescriptorset::ValidateBufferUsage(GetBufferState(buffer), type, error_code, error_msg)) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2129 | std::stringstream error_str; |
| 2130 | error_str << "Attempted copy update to texel buffer descriptor failed due to: " << error_msg->c_str(); |
| 2131 | *error_msg = error_str.str(); |
| 2132 | return false; |
| 2133 | } |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2134 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2135 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2136 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2137 | case DescriptorClass::GeneralBuffer: { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2138 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 2139 | const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di); |
Józef Kucia | 5297e37 | 2017-10-13 22:31:34 +0200 | [diff] [blame] | 2140 | if (!src_desc->updated) continue; |
John Zulauf | d9435c3 | 2019-06-05 15:55:36 -0600 | [diff] [blame] | 2141 | auto buffer = static_cast<const BufferDescriptor *>(src_desc)->GetBuffer(); |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2142 | if (!cvdescriptorset::ValidateBufferUsage(GetBufferState(buffer), type, error_code, error_msg)) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2143 | std::stringstream error_str; |
| 2144 | error_str << "Attempted copy update to buffer descriptor failed due to: " << error_msg->c_str(); |
| 2145 | *error_msg = error_str.str(); |
| 2146 | return false; |
| 2147 | } |
Tobin Ehlis | cbcf234 | 2016-05-24 13:07:12 -0600 | [diff] [blame] | 2148 | } |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2149 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2150 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2151 | case DescriptorClass::InlineUniform: |
| 2152 | case DescriptorClass::AccelerationStructure: |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 2153 | break; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 2154 | default: |
| 2155 | assert(0); // We've already verified update type so should never get here |
| 2156 | break; |
Tobin Ehlis | 300888c | 2016-05-18 13:43:26 -0600 | [diff] [blame] | 2157 | } |
| 2158 | // All checks passed so update contents are good |
| 2159 | return true; |
Chris Forbes | b4e0bdb | 2016-05-31 16:34:40 +1200 | [diff] [blame] | 2160 | } |
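// Illustrative sketch (not from this file): a descriptor copy of the kind VerifyCopyUpdateContents
// validates. src_set and dst_set are hypothetical, already-allocated descriptor sets whose binding 0
// holds matching descriptor types.
//
//     VkCopyDescriptorSet copy = {};
//     copy.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
//     copy.srcSet = src_set;
//     copy.srcBinding = 0;
//     copy.srcArrayElement = 0;
//     copy.dstSet = dst_set;
//     copy.dstBinding = 0;
//     copy.dstArrayElement = 0;
//     copy.descriptorCount = 1;
//     vkUpdateDescriptorSets(device, 0, nullptr, 1, &copy);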
Tobin Ehlis | f320b19 | 2017-03-14 11:22:50 -0600 | [diff] [blame] | 2161 | // Update the common AllocateDescriptorSetsData |
Camden Stocker | 325ae06 | 2019-08-28 11:22:59 -0600 | [diff] [blame] | 2162 | void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info, |
| 2163 | cvdescriptorset::AllocateDescriptorSetsData *ds_data) { |
Tobin Ehlis | f320b19 | 2017-03-14 11:22:50 -0600 | [diff] [blame] | 2164 | for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) { |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 2165 | auto layout = GetDescriptorSetLayout(this, p_alloc_info->pSetLayouts[i]); |
Tobin Ehlis | f320b19 | 2017-03-14 11:22:50 -0600 | [diff] [blame] | 2166 | if (layout) { |
| 2167 | ds_data->layout_nodes[i] = layout; |
| 2168 | // Count total descriptors required per type |
| 2169 | for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) { |
| 2170 | const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j); |
| 2171 | uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType); |
| 2172 | ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount; |
| 2173 | } |
| 2174 | } |
| 2175 | // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call |
| 2176 | } |
Petr Kraus | 13c98a6 | 2017-12-09 00:22:39 +0100 | [diff] [blame] | 2177 | } |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 2178 | // Verify that the state at allocate time is correct, but don't actually allocate the sets yet |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 2179 | bool CoreChecks::ValidateAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info, |
Mark Lobodzinski | b56bbb9 | 2019-02-18 11:49:59 -0700 | [diff] [blame] | 2180 | const cvdescriptorset::AllocateDescriptorSetsData *ds_data) { |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 2181 | bool skip = false; |
Mark Lobodzinski | 7804bd4 | 2019-03-06 11:28:48 -0700 | [diff] [blame] | 2182 | auto pool_state = GetDescriptorPoolState(p_alloc_info->descriptorPool); |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 2183 | |
| 2184 | for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) { |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 2185 | auto layout = GetDescriptorSetLayout(this, p_alloc_info->pSetLayouts[i]); |
John Zulauf | 5562d06 | 2018-01-24 11:54:05 -0700 | [diff] [blame] | 2186 | if (layout) { // nullptr layout indicates no valid layout handle for this device, validated/logged in object_tracker |
John Zulauf | 1d27e0a | 2018-11-05 10:12:48 -0700 | [diff] [blame] | 2187 | if (layout->IsPushDescriptor()) { |
John Zulauf | 5562d06 | 2018-01-24 11:54:05 -0700 | [diff] [blame] | 2188 | skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 2189 | HandleToUint64(p_alloc_info->pSetLayouts[i]), "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308", |
locke-lunarg | 9edc281 | 2019-06-17 23:18:52 -0600 | [diff] [blame] | 2190 | "%s specified at pSetLayouts[%" PRIu32 |
Mark Lobodzinski | 487a0d1 | 2018-03-30 10:09:03 -0600 | [diff] [blame] | 2191 | "] in vkAllocateDescriptorSets() was created with invalid flag %s set.", |
Locke | ca0d979 | 2019-03-03 23:48:13 -0700 | [diff] [blame] | 2192 | report_data->FormatHandle(p_alloc_info->pSetLayouts[i]).c_str(), i, |
Mark Lobodzinski | 487a0d1 | 2018-03-30 10:09:03 -0600 | [diff] [blame] | 2193 | "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR"); |
John Zulauf | 5562d06 | 2018-01-24 11:54:05 -0700 | [diff] [blame] | 2194 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2195 | if (layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT && |
| 2196 | !(pool_state->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT)) { |
| 2197 | skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 2198 | 0, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-03044", |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2199 | "Descriptor set layout create flags and pool create flags mismatch for index (%d)", i); |
| 2200 | } |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 2201 | } |
| 2202 | } |
Mark Lobodzinski | f45e45f | 2019-04-19 14:15:39 -0600 | [diff] [blame] | 2203 | if (!device_extensions.vk_khr_maintenance1) { |
Mike Schuchardt | 64b5bb7 | 2017-03-21 16:33:26 -0600 | [diff] [blame] | 2204 | // Verify this pool has enough descriptorSets remaining to satisfy the allocation
| 2205 | if (pool_state->availableSets < p_alloc_info->descriptorSetCount) { |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 2206 | skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 2207 | HandleToUint64(pool_state->pool), "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306", |
locke-lunarg | 9edc281 | 2019-06-17 23:18:52 -0600 | [diff] [blame] | 2208 | "Unable to allocate %u descriptorSets from %s" |
Mark Lobodzinski | 487a0d1 | 2018-03-30 10:09:03 -0600 | [diff] [blame] | 2209 | ". This pool only has %d descriptorSets remaining.", |
Locke | ca0d979 | 2019-03-03 23:48:13 -0700 | [diff] [blame] | 2210 | p_alloc_info->descriptorSetCount, report_data->FormatHandle(pool_state->pool).c_str(), |
| 2211 | pool_state->availableSets); |
Mike Schuchardt | 64b5bb7 | 2017-03-21 16:33:26 -0600 | [diff] [blame] | 2212 | } |
| 2213 | // Determine whether descriptor counts are satisfiable |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 2214 | for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) { |
| 2215 | if (ds_data->required_descriptors_by_type.at(it->first) > pool_state->availableDescriptorTypeCount[it->first]) { |
Locke | ca0d979 | 2019-03-03 23:48:13 -0700 | [diff] [blame] | 2216 | skip |= log_msg( |
| 2217 | report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, |
| 2218 | HandleToUint64(pool_state->pool), "VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307", |
locke-lunarg | 9edc281 | 2019-06-17 23:18:52 -0600 | [diff] [blame] | 2219 | "Unable to allocate %u descriptors of type %s from %s" |
Locke | ca0d979 | 2019-03-03 23:48:13 -0700 | [diff] [blame] | 2220 | ". This pool only has %d descriptors of this type remaining.", |
| 2221 | ds_data->required_descriptors_by_type.at(it->first), string_VkDescriptorType(VkDescriptorType(it->first)), |
| 2222 | report_data->FormatHandle(pool_state->pool).c_str(), pool_state->availableDescriptorTypeCount[it->first]); |
Mike Schuchardt | 64b5bb7 | 2017-03-21 16:33:26 -0600 | [diff] [blame] | 2223 | } |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 2224 | } |
| 2225 | } |
Tobin Ehlis | 5d749ea | 2016-07-18 13:14:01 -0600 | [diff] [blame] | 2226 | |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2227 | const auto *count_allocate_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext); |
| 2228 | |
| 2229 | if (count_allocate_info) { |
| 2230 | if (count_allocate_info->descriptorSetCount != 0 && |
| 2231 | count_allocate_info->descriptorSetCount != p_alloc_info->descriptorSetCount) { |
| 2232 | skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, 0, |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 2233 | "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-descriptorSetCount-03045", |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2234 | "VkDescriptorSetAllocateInfo::descriptorSetCount (%d) != " |
| 2235 | "VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::descriptorSetCount (%d)", |
| 2236 | p_alloc_info->descriptorSetCount, count_allocate_info->descriptorSetCount); |
| 2237 | } |
| 2238 | if (count_allocate_info->descriptorSetCount == p_alloc_info->descriptorSetCount) { |
| 2239 | for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) { |
Mark Lobodzinski | 3840ca0 | 2019-03-08 18:36:11 -0700 | [diff] [blame] | 2240 | auto layout = GetDescriptorSetLayout(this, p_alloc_info->pSetLayouts[i]); |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2241 | if (count_allocate_info->pDescriptorCounts[i] > layout->GetDescriptorCountFromBinding(layout->GetMaxBinding())) { |
| 2242 | skip |= log_msg( |
| 2243 | report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, 0, |
Dave Houlton | d8ed021 | 2018-05-16 17:18:24 -0600 | [diff] [blame] | 2244 | "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pSetLayouts-03046", |
| 2245 | "pDescriptorCounts[%d] = (%d), binding's descriptorCount = (%d)", i, |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2246 | count_allocate_info->pDescriptorCounts[i], layout->GetDescriptorCountFromBinding(layout->GetMaxBinding())); |
| 2247 | } |
| 2248 | } |
| 2249 | } |
| 2250 | } |
| 2251 | |
Mark Lobodzinski | bdc3b02 | 2017-04-24 09:11:35 -0600 | [diff] [blame] | 2252 | return skip; |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 2253 | } |
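// Illustrative sketch (not from this file): an allocation that exercises the checks above, including the
// variable descriptor count extension struct. The names (device, pool, layout, descriptor_set) and the
// count of 16 are hypothetical; pDescriptorCounts[i] must not exceed the descriptorCount of the layout's
// highest binding.
//
//     uint32_t variable_counts[1] = {16};
//     VkDescriptorSetVariableDescriptorCountAllocateInfoEXT count_info = {};
//     count_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT;
//     count_info.descriptorSetCount = 1;   // must be 0 or equal to VkDescriptorSetAllocateInfo::descriptorSetCount
//     count_info.pDescriptorCounts = variable_counts;
//
//     VkDescriptorSetAllocateInfo allocate_info = {};
//     allocate_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
//     allocate_info.pNext = &count_info;
//     allocate_info.descriptorPool = pool;  // must have enough sets and per-type descriptors remaining
//     allocate_info.descriptorSetCount = 1;
//     allocate_info.pSetLayouts = &layout;  // must not be a push-descriptor layout
//     VkDescriptorSet descriptor_set;
//     vkAllocateDescriptorSets(device, &allocate_info, &descriptor_set);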
 | 2254 | // Decrement the pool's available set and descriptor counts and insert the new sets into set_map
John Zulauf | e3b35f3 | 2019-06-25 14:21:21 -0600 | [diff] [blame] | 2255 | void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info, |
| 2256 | const VkDescriptorSet *descriptor_sets, |
| 2257 | const cvdescriptorset::AllocateDescriptorSetsData *ds_data) { |
Mark Lobodzinski | a33af95 | 2019-04-25 14:59:05 -0600 | [diff] [blame] | 2258 | auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get(); |
Mark Lobodzinski | c943018 | 2017-06-13 13:00:05 -0600 | [diff] [blame] | 2259 | // Account for sets and individual descriptors allocated from pool |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 2260 | pool_state->availableSets -= p_alloc_info->descriptorSetCount; |
Jeff Bolz | e54ae89 | 2018-09-08 12:16:29 -0500 | [diff] [blame] | 2261 | for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) { |
| 2262 | pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first); |
Tobin Ehlis | 68d0adf | 2016-06-01 11:33:50 -0600 | [diff] [blame] | 2263 | } |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2264 | |
| 2265 | const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext); |
| 2266 | bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount; |
| 2267 | |
Mark Lobodzinski | c943018 | 2017-06-13 13:00:05 -0600 | [diff] [blame] | 2268 | // Create tracking object for each descriptor set; insert into global map and the pool's set. |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 2269 | for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) { |
Jeff Bolz | fdf9607 | 2018-04-10 14:32:18 -0500 | [diff] [blame] | 2270 | uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0; |
| 2271 | |
Mark Lobodzinski | 14112ab | 2019-04-25 15:29:34 -0600 | [diff] [blame] | 2272 | std::unique_ptr<cvdescriptorset::DescriptorSet> new_ds(new cvdescriptorset::DescriptorSet( |
| 2273 | descriptor_sets[i], p_alloc_info->descriptorPool, ds_data->layout_nodes[i], variable_count, this)); |
| 2274 | pool_state->sets.insert(new_ds.get()); |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 2275 | new_ds->in_use.store(0); |
Mark Lobodzinski | 14112ab | 2019-04-25 15:29:34 -0600 | [diff] [blame] | 2276 | setMap[descriptor_sets[i]] = std::move(new_ds); |
Tobin Ehlis | ee47146 | 2016-05-26 11:21:59 -0600 | [diff] [blame] | 2277 | } |
| 2278 | } |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2279 | |
Jeff Bolz | dd4cfa1 | 2019-08-11 20:57:51 -0500 | [diff] [blame] | 2280 | const BindingReqMap &cvdescriptorset::PrefilterBindRequestMap::FilteredMap(const CMD_BUFFER_STATE &cb_state, |
| 2281 | const PIPELINE_STATE &pipeline) { |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2282 | if (IsManyDescriptors()) { |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2283 | filtered_map_.reset(new std::map<uint32_t, descriptor_req>()); |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2284 | descriptor_set_.FilterBindingReqs(cb_state, pipeline, orig_map_, filtered_map_.get()); |
| 2285 | return *filtered_map_; |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 2286 | } |
John Zulauf | fbf3c20 | 2019-07-17 14:57:14 -0600 | [diff] [blame] | 2287 | return orig_map_; |
Artem Kharytoniuk | 2456f99 | 2018-01-12 14:17:41 +0100 | [diff] [blame] | 2288 | } |
John Zulauf | 4a015c9 | 2019-06-04 09:50:05 -0600 | [diff] [blame] | 2289 | |
 | 2290 | // Starting at the descriptor at 'offset' within the given binding, parse over update_count
 | 2291 | // descriptor updates and verify that, for any binding boundaries that are crossed, the next binding(s) are all consistent.
 | 2292 | // Consistency means that their type, stage flags, and immutable sampler usage all match.
 | 2293 | // If so, return true; if not, fill in error_msg and return false.
| 2294 | bool cvdescriptorset::VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator current_binding, uint32_t offset, |
| 2295 | uint32_t update_count, const char *type, const VkDescriptorSet set, |
| 2296 | std::string *error_msg) { |
| 2297 | // Verify consecutive bindings match (if needed) |
| 2298 | auto orig_binding = current_binding; |
| 2299 | // Track count of descriptors in the current_bindings that are remaining to be updated |
| 2300 | auto binding_remaining = current_binding.GetDescriptorCount(); |
| 2301 | // First, it's legal to offset beyond your own binding so handle that case |
| 2302 | // Really this is just searching for the binding in which the update begins and adjusting offset accordingly |
| 2303 | while (offset >= binding_remaining && !current_binding.AtEnd()) { |
| 2304 | // Advance to next binding, decrement offset by binding size |
| 2305 | offset -= binding_remaining; |
| 2306 | ++current_binding; |
| 2307 | binding_remaining = current_binding.GetDescriptorCount(); // Accessors are safe if AtEnd |
| 2308 | } |
| 2309 | assert(!current_binding.AtEnd()); // As written assumes range check has been made before calling |
| 2310 | binding_remaining -= offset; |
| 2311 | while (update_count > binding_remaining) { // While our updates overstep current binding |
| 2312 | // Verify next consecutive binding matches type, stage flags & immutable sampler use |
| 2313 | auto next_binding = current_binding.Next(); |
| 2314 | if (!current_binding.IsConsistent(next_binding)) { |
| 2315 | std::stringstream error_str; |
| 2316 | error_str << "Attempting " << type; |
| 2317 | if (current_binding.Layout()->IsPushDescriptor()) { |
| 2318 | error_str << " push descriptors"; |
| 2319 | } else { |
| 2320 | error_str << " descriptor set " << set; |
| 2321 | } |
| 2322 | error_str << " binding #" << orig_binding.Binding() << " with #" << update_count |
| 2323 | << " descriptors being updated but this update oversteps the bounds of this binding and the next binding is " |
| 2324 | "not consistent with current binding so this update is invalid."; |
| 2325 | *error_msg = error_str.str(); |
| 2326 | return false; |
| 2327 | } |
| 2328 | current_binding = next_binding; |
| 2329 | // For sake of this check consider the bindings updated and grab count for next binding |
| 2330 | update_count -= binding_remaining; |
| 2331 | binding_remaining = current_binding.GetDescriptorCount(); |
| 2332 | } |
| 2333 | return true; |
| 2334 | } |
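// Illustrative walk-through (hypothetical layout): suppose binding 0 and binding 1 each declare 4
// descriptors with identical type, stage flags, and immutable sampler usage. A write with dstBinding = 0,
// dstArrayElement = 2, and descriptorCount = 5 consumes elements 2..3 of binding 0 and then rolls over
// into elements 0..2 of binding 1; the loop above accepts it because the two bindings are consistent.
// If binding 1 differed in any of those properties, the update would be rejected with the message built above.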
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 2335 | |
| 2336 | // Validate the state for a given write update but don't actually perform the update |
| 2337 | // If an error would occur for this update, return false and fill in details in error_msg string |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2338 | bool CoreChecks::ValidateWriteUpdate(const DescriptorSet *dest_set, const VkWriteDescriptorSet *update, const char *func_name, |
John Zulauf | bd9b341 | 2019-08-22 17:16:11 -0600 | [diff] [blame] | 2339 | std::string *error_code, std::string *error_msg) const { |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 2340 | const auto dest_layout = dest_set->GetLayout(); |
| 2341 | |
| 2342 | // Verify dst layout still valid |
| 2343 | if (dest_layout->IsDestroyed()) { |
| 2344 | *error_code = "VUID-VkWriteDescriptorSet-dstSet-00320"; |
| 2345 | string_sprintf(error_msg, "Cannot call %s to perform write update on %s which has been destroyed", func_name, |
| 2346 | dest_set->StringifySetAndLayout().c_str()); |
| 2347 | return false; |
| 2348 | } |
| 2349 | // Verify dst binding exists |
| 2350 | if (!dest_layout->HasBinding(update->dstBinding)) { |
| 2351 | *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00315"; |
| 2352 | std::stringstream error_str; |
| 2353 | error_str << dest_set->StringifySetAndLayout() << " does not have binding " << update->dstBinding; |
| 2354 | *error_msg = error_str.str(); |
| 2355 | return false; |
| 2356 | } |
| 2357 | |
| 2358 | DescriptorSetLayout::ConstBindingIterator dest(dest_layout.get(), update->dstBinding); |
| 2359 | // Make sure binding isn't empty |
| 2360 | if (0 == dest.GetDescriptorCount()) { |
| 2361 | *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00316"; |
| 2362 | std::stringstream error_str; |
 | 2363 | error_str << dest_set->StringifySetAndLayout() << " cannot update binding " << update->dstBinding
| 2364 | << " that has 0 descriptors"; |
| 2365 | *error_msg = error_str.str(); |
| 2366 | return false; |
| 2367 | } |
| 2368 | |
| 2369 | // Verify idle ds |
| 2370 | if (dest_set->in_use.load() && !(dest.GetDescriptorBindingFlags() & (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT | |
| 2371 | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) { |
| 2372 | // TODO : Re-using Free Idle error code, need write update idle error code |
| 2373 | *error_code = "VUID-vkFreeDescriptorSets-pDescriptorSets-00309"; |
| 2374 | std::stringstream error_str; |
| 2375 | error_str << "Cannot call " << func_name << " to perform write update on " << dest_set->StringifySetAndLayout() |
| 2376 | << " that is in use by a command buffer"; |
| 2377 | *error_msg = error_str.str(); |
| 2378 | return false; |
| 2379 | } |
 | 2380 | // We know that binding is valid; verify the update parameters against each affected descriptor
| 2381 | auto start_idx = dest.GetGlobalIndexRange().start + update->dstArrayElement; |
| 2382 | auto type = dest.GetType(); |
| 2383 | if (type != update->descriptorType) { |
| 2384 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00319"; |
| 2385 | std::stringstream error_str; |
| 2386 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 2387 | << " with type " << string_VkDescriptorType(type) << " but update type is " |
| 2388 | << string_VkDescriptorType(update->descriptorType); |
| 2389 | *error_msg = error_str.str(); |
| 2390 | return false; |
| 2391 | } |
| 2392 | auto total_descriptors = dest_layout->GetTotalDescriptorCount(); |
| 2393 | if (update->descriptorCount > (total_descriptors - start_idx)) { |
| 2394 | *error_code = "VUID-VkWriteDescriptorSet-dstArrayElement-00321"; |
| 2395 | std::stringstream error_str; |
| 2396 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 2397 | << " with " << total_descriptors - start_idx |
| 2398 | << " descriptors in that binding and all successive bindings of the set, but update of " |
| 2399 | << update->descriptorCount << " descriptors combined with update array element offset of " |
| 2400 | << update->dstArrayElement << " oversteps the available number of consecutive descriptors"; |
| 2401 | *error_msg = error_str.str(); |
| 2402 | return false; |
| 2403 | } |
| 2404 | if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) { |
| 2405 | if ((update->dstArrayElement % 4) != 0) { |
| 2406 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02219"; |
| 2407 | std::stringstream error_str; |
| 2408 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 2409 | << " with " |
| 2410 | << "dstArrayElement " << update->dstArrayElement << " not a multiple of 4"; |
| 2411 | *error_msg = error_str.str(); |
| 2412 | return false; |
| 2413 | } |
| 2414 | if ((update->descriptorCount % 4) != 0) { |
| 2415 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02220"; |
| 2416 | std::stringstream error_str; |
| 2417 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 2418 | << " with " |
| 2419 | << "descriptorCount " << update->descriptorCount << " not a multiple of 4"; |
| 2420 | *error_msg = error_str.str(); |
| 2421 | return false; |
| 2422 | } |
| 2423 | const auto *write_inline_info = lvl_find_in_chain<VkWriteDescriptorSetInlineUniformBlockEXT>(update->pNext); |
| 2424 | if (!write_inline_info || write_inline_info->dataSize != update->descriptorCount) { |
| 2425 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02221"; |
| 2426 | std::stringstream error_str; |
| 2427 | if (!write_inline_info) { |
| 2428 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" |
| 2429 | << update->dstBinding << " with " |
| 2430 | << "VkWriteDescriptorSetInlineUniformBlockEXT missing"; |
| 2431 | } else { |
| 2432 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" |
| 2433 | << update->dstBinding << " with " |
| 2434 | << "VkWriteDescriptorSetInlineUniformBlockEXT dataSize " << write_inline_info->dataSize |
| 2435 | << " not equal to " |
| 2436 | << "VkWriteDescriptorSet descriptorCount " << update->descriptorCount; |
| 2437 | } |
| 2438 | *error_msg = error_str.str(); |
| 2439 | return false; |
| 2440 | } |
| 2441 | // This error is probably unreachable due to the previous two errors |
| 2442 | if (write_inline_info && (write_inline_info->dataSize % 4) != 0) { |
| 2443 | *error_code = "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-02222"; |
| 2444 | std::stringstream error_str; |
| 2445 | error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 2446 | << " with " |
| 2447 | << "VkWriteDescriptorSetInlineUniformBlockEXT dataSize " << write_inline_info->dataSize |
| 2448 | << " not a multiple of 4"; |
| 2449 | *error_msg = error_str.str(); |
| 2450 | return false; |
| 2451 | } |
| 2452 | } |
| 2453 | // Verify consecutive bindings match (if needed) |
| 2454 | if (!VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator(dest_layout.get(), update->dstBinding), |
| 2455 | update->dstArrayElement, update->descriptorCount, "write update to", dest_set->GetSet(), |
| 2456 | error_msg)) { |
| 2457 | // TODO : Should break out "consecutive binding updates" language into valid usage statements |
| 2458 | *error_code = "VUID-VkWriteDescriptorSet-dstArrayElement-00321"; |
| 2459 | return false; |
| 2460 | } |
| 2461 | // Update is within bounds and consistent so last step is to validate update contents |
John Zulauf | 459939f | 2019-06-04 16:49:35 -0600 | [diff] [blame] | 2462 | if (!VerifyWriteUpdateContents(dest_set, update, start_idx, func_name, error_code, error_msg)) { |
John Zulauf | 4956fff | 2019-06-04 16:54:38 -0600 | [diff] [blame] | 2463 | std::stringstream error_str; |
| 2464 | error_str << "Write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding |
| 2465 | << " failed with error message: " << error_msg->c_str(); |
| 2466 | *error_msg = error_str.str(); |
| 2467 | return false; |
| 2468 | } |
| 2469 | // All checks passed, update is clean |
| 2470 | return true; |
| 2471 | } |
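// Illustrative sketch (not from this file): an inline-uniform-block write that satisfies the multiple-of-4
// rules checked above. The handles (device, ds) and the 16-byte payload are hypothetical.
//
//     uint8_t block_data[16] = {};  // dataSize must be a multiple of 4
//     VkWriteDescriptorSetInlineUniformBlockEXT inline_write = {};
//     inline_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
//     inline_write.dataSize = sizeof(block_data);
//     inline_write.pData = block_data;
//
//     VkWriteDescriptorSet write = {};
//     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
//     write.pNext = &inline_write;
//     write.dstSet = ds;
//     write.dstBinding = 0;
//     write.dstArrayElement = 0;                   // byte offset, must be a multiple of 4
//     write.descriptorCount = sizeof(block_data);  // counts bytes and must equal inline_write.dataSize
//     write.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
//     vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);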
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2472 | |
| 2473 | // Verify that the contents of the update are ok, but don't perform actual update |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2474 | bool CoreChecks::VerifyWriteUpdateContents(const DescriptorSet *dest_set, const VkWriteDescriptorSet *update, const uint32_t index, |
John Zulauf | bd9b341 | 2019-08-22 17:16:11 -0600 | [diff] [blame] | 2475 | const char *func_name, std::string *error_code, std::string *error_msg) const { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2476 | using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor; |
| 2477 | using SamplerDescriptor = cvdescriptorset::SamplerDescriptor; |
| 2478 | |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2479 | switch (update->descriptorType) { |
| 2480 | case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: { |
| 2481 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 2482 | // Validate image |
| 2483 | auto image_view = update->pImageInfo[di].imageView; |
| 2484 | auto image_layout = update->pImageInfo[di].imageLayout; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2485 | if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, func_name, error_code, error_msg)) { |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2486 | std::stringstream error_str; |
| 2487 | error_str << "Attempted write update to combined image sampler descriptor failed due to: " |
| 2488 | << error_msg->c_str(); |
| 2489 | *error_msg = error_str.str(); |
| 2490 | return false; |
| 2491 | } |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2492 | if (device_extensions.vk_khr_sampler_ycbcr_conversion) { |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2493 | ImageSamplerDescriptor *desc = (ImageSamplerDescriptor *)dest_set->GetDescriptorFromGlobalIndex(index + di); |
| 2494 | if (desc->IsImmutableSampler()) { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2495 | auto sampler_state = GetSamplerState(desc->GetSampler()); |
| 2496 | auto iv_state = GetImageViewState(image_view); |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2497 | if (iv_state && sampler_state) { |
| 2498 | if (iv_state->samplerConversion != sampler_state->samplerConversion) { |
| 2499 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-01948"; |
| 2500 | std::stringstream error_str; |
 | 2501 | error_str << "Attempted write update to combined image sampler descriptor, but the image view's "
 | 2502 |              "and sampler's YCbCr conversions are not identical; sampler: "
 | 2503 |           << desc->GetSampler() << ", image view: " << iv_state->image_view << ".";
| 2504 | *error_msg = error_str.str(); |
| 2505 | return false; |
| 2506 | } |
| 2507 | } |
| 2508 | } else { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2509 | auto iv_state = GetImageViewState(image_view); |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2510 | if (iv_state && (iv_state->samplerConversion != VK_NULL_HANDLE)) { |
Shannon McPherson | f7d9cf6 | 2019-06-26 09:23:57 -0600 | [diff] [blame] | 2511 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02738"; |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2512 | std::stringstream error_str; |
| 2513 | error_str << "Because dstSet (" << update->dstSet << ") is bound to image view (" |
| 2514 | << iv_state->image_view |
| 2515 | << ") that includes a YCBCR conversion, it must have been allocated with a layout that " |
| 2516 | "includes an immutable sampler."; |
| 2517 | *error_msg = error_str.str(); |
| 2518 | return false; |
| 2519 | } |
| 2520 | } |
| 2521 | } |
| 2522 | } |
| 2523 | } |
| 2524 | // fall through |
| 2525 | case VK_DESCRIPTOR_TYPE_SAMPLER: { |
| 2526 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 2527 | SamplerDescriptor *desc = (SamplerDescriptor *)dest_set->GetDescriptorFromGlobalIndex(index + di); |
| 2528 | if (!desc->IsImmutableSampler()) { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2529 | if (!ValidateSampler(update->pImageInfo[di].sampler)) { |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2530 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325"; |
| 2531 | std::stringstream error_str; |
| 2532 | error_str << "Attempted write update to sampler descriptor with invalid sampler: " |
| 2533 | << update->pImageInfo[di].sampler << "."; |
| 2534 | *error_msg = error_str.str(); |
| 2535 | return false; |
| 2536 | } |
| 2537 | } else { |
| 2538 | // TODO : Warn here |
| 2539 | } |
| 2540 | } |
| 2541 | break; |
| 2542 | } |
| 2543 | case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: |
| 2544 | case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: |
| 2545 | case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: { |
| 2546 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 2547 | auto image_view = update->pImageInfo[di].imageView; |
| 2548 | auto image_layout = update->pImageInfo[di].imageLayout; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2549 | if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, func_name, error_code, error_msg)) { |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2550 | std::stringstream error_str; |
| 2551 | error_str << "Attempted write update to image descriptor failed due to: " << error_msg->c_str(); |
| 2552 | *error_msg = error_str.str(); |
| 2553 | return false; |
| 2554 | } |
| 2555 | } |
| 2556 | break; |
| 2557 | } |
| 2558 | case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: |
| 2559 | case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: { |
| 2560 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
| 2561 | auto buffer_view = update->pTexelBufferView[di]; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2562 | auto bv_state = GetBufferViewState(buffer_view); |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2563 | if (!bv_state) { |
| 2564 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00323"; |
| 2565 | std::stringstream error_str; |
| 2566 | error_str << "Attempted write update to texel buffer descriptor with invalid buffer view: " << buffer_view; |
| 2567 | *error_msg = error_str.str(); |
| 2568 | return false; |
| 2569 | } |
| 2570 | auto buffer = bv_state->create_info.buffer; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2571 | auto buffer_state = GetBufferState(buffer); |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2572 | // Verify that buffer underlying the view hasn't been destroyed prematurely |
| 2573 | if (!buffer_state) { |
| 2574 | *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00323"; |
| 2575 | std::stringstream error_str; |
| 2576 | error_str << "Attempted write update to texel buffer descriptor failed because underlying buffer (" << buffer |
| 2577 | << ") has been destroyed: " << error_msg->c_str(); |
| 2578 | *error_msg = error_str.str(); |
| 2579 | return false; |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2580 | } else if (!cvdescriptorset::ValidateBufferUsage(buffer_state, update->descriptorType, error_code, error_msg)) { |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2581 | std::stringstream error_str; |
| 2582 | error_str << "Attempted write update to texel buffer descriptor failed due to: " << error_msg->c_str(); |
| 2583 | *error_msg = error_str.str(); |
| 2584 | return false; |
| 2585 | } |
| 2586 | } |
| 2587 | break; |
| 2588 | } |
| 2589 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: |
| 2590 | case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: |
| 2591 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: |
| 2592 | case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: { |
| 2593 | for (uint32_t di = 0; di < update->descriptorCount; ++di) { |
John Zulauf | c93c425 | 2019-06-25 09:19:49 -0600 | [diff] [blame] | 2594 | if (!ValidateBufferUpdate(update->pBufferInfo + di, update->descriptorType, func_name, error_code, error_msg)) { |
John Zulauf | adb3f54 | 2019-06-04 17:01:00 -0600 | [diff] [blame] | 2595 | std::stringstream error_str; |
| 2596 | error_str << "Attempted write update to buffer descriptor failed due to: " << error_msg->c_str(); |
| 2597 | *error_msg = error_str.str(); |
| 2598 | return false; |
| 2599 | } |
| 2600 | } |
| 2601 | break; |
| 2602 | } |
| 2603 | case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: |
| 2604 | break; |
| 2605 | case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV: |
| 2606 | // XXX TODO |
| 2607 | break; |
| 2608 | default: |
| 2609 | assert(0); // We've already verified update type so should never get here |
| 2610 | break; |
| 2611 | } |
| 2612 | // All checks passed so update contents are good |
| 2613 | return true; |
| 2614 | } |
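// Illustrative sketch (not from this file): a combined image sampler write of the kind validated above.
// The handles (device, ds, sampler, image_view) are hypothetical; if the image view carries a sampler
// YCbCr conversion, the binding must use a matching immutable sampler (VUIDs 01948 / 02738).
//
//     VkDescriptorImageInfo image_info = {};
//     image_info.sampler = sampler;
//     image_info.imageView = image_view;
//     image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
//
//     VkWriteDescriptorSet write = {};
//     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
//     write.dstSet = ds;
//     write.dstBinding = 0;
//     write.descriptorCount = 1;
//     write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
//     write.pImageInfo = &image_info;
//     vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);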