blob: ecaa80e285c9c218d1fb3115d9a87eda69aba574 [file] [log] [blame]
Tony-LunarG73719992020-01-15 10:20:28 -07001/* Copyright (c) 2015-2020 The Khronos Group Inc.
2 * Copyright (c) 2015-2020 Valve Corporation
3 * Copyright (c) 2015-2020 LunarG, Inc.
4 * Copyright (C) 2015-2020 Google Inc.
Tobias Hector6663c9b2020-11-05 10:18:02 +00005 * Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
locke-lunargd556cc32019-09-17 01:21:23 -06006 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 * Author: Mark Lobodzinski <mark@lunarg.com>
20 * Author: Dave Houlton <daveh@lunarg.com>
21 * Shannon McPherson <shannon@lunarg.com>
Tobias Hector6663c9b2020-11-05 10:18:02 +000022 * Author: Tobias Hector <tobias.hector@amd.com>
locke-lunargd556cc32019-09-17 01:21:23 -060023 */
24
locke-lunargd556cc32019-09-17 01:21:23 -060025#include <cmath>
26#include <set>
locke-lunargd556cc32019-09-17 01:21:23 -060027
28#include "vk_enum_string_helper.h"
29#include "vk_format_utils.h"
30#include "vk_layer_data.h"
31#include "vk_layer_utils.h"
32#include "vk_layer_logging.h"
33#include "vk_typemap_helper.h"
34
35#include "chassis.h"
36#include "state_tracker.h"
37#include "shader_validation.h"
38
John Zulauf890b50b2020-06-17 15:18:19 -060039const char *CommandTypeString(CMD_TYPE type) {
40 // Autogenerated as part of the vk_validation_error_message.h codegen
Mark Lobodzinski677dc0b2020-11-12 15:28:09 -070041 return kGeneratedCommandNameList[type];
John Zulauf890b50b2020-06-17 15:18:19 -060042}
43
locke-lunarg4189aa22020-10-21 00:23:48 -060044VkDynamicState ConvertToDynamicState(CBStatusFlagBits flag) {
45 switch (flag) {
46 case CBSTATUS_LINE_WIDTH_SET:
47 return VK_DYNAMIC_STATE_LINE_WIDTH;
48 case CBSTATUS_DEPTH_BIAS_SET:
49 return VK_DYNAMIC_STATE_DEPTH_BIAS;
50 case CBSTATUS_BLEND_CONSTANTS_SET:
51 return VK_DYNAMIC_STATE_BLEND_CONSTANTS;
52 case CBSTATUS_DEPTH_BOUNDS_SET:
53 return VK_DYNAMIC_STATE_DEPTH_BOUNDS;
54 case CBSTATUS_STENCIL_READ_MASK_SET:
55 return VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK;
56 case CBSTATUS_STENCIL_WRITE_MASK_SET:
57 return VK_DYNAMIC_STATE_STENCIL_WRITE_MASK;
58 case CBSTATUS_STENCIL_REFERENCE_SET:
59 return VK_DYNAMIC_STATE_STENCIL_REFERENCE;
60 case CBSTATUS_VIEWPORT_SET:
61 return VK_DYNAMIC_STATE_VIEWPORT;
62 case CBSTATUS_SCISSOR_SET:
63 return VK_DYNAMIC_STATE_SCISSOR;
64 case CBSTATUS_EXCLUSIVE_SCISSOR_SET:
65 return VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV;
66 case CBSTATUS_SHADING_RATE_PALETTE_SET:
67 return VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV;
68 case CBSTATUS_LINE_STIPPLE_SET:
69 return VK_DYNAMIC_STATE_LINE_STIPPLE_EXT;
70 case CBSTATUS_VIEWPORT_W_SCALING_SET:
71 return VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV;
72 case CBSTATUS_CULL_MODE_SET:
73 return VK_DYNAMIC_STATE_CULL_MODE_EXT;
74 case CBSTATUS_FRONT_FACE_SET:
75 return VK_DYNAMIC_STATE_FRONT_FACE_EXT;
76 case CBSTATUS_PRIMITIVE_TOPOLOGY_SET:
77 return VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT;
78 case CBSTATUS_VIEWPORT_WITH_COUNT_SET:
79 return VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT;
80 case CBSTATUS_SCISSOR_WITH_COUNT_SET:
81 return VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT;
82 case CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET:
83 return VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT;
84 case CBSTATUS_DEPTH_TEST_ENABLE_SET:
85 return VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT;
86 case CBSTATUS_DEPTH_WRITE_ENABLE_SET:
87 return VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT;
88 case CBSTATUS_DEPTH_COMPARE_OP_SET:
89 return VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT;
90 case CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET:
91 return VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT;
92 case CBSTATUS_STENCIL_TEST_ENABLE_SET:
93 return VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT;
94 case CBSTATUS_STENCIL_OP_SET:
95 return VK_DYNAMIC_STATE_STENCIL_OP_EXT;
96 case CBSTATUS_DISCARD_RECTANGLE_SET:
97 return VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT;
98 case CBSTATUS_SAMPLE_LOCATIONS_SET:
99 return VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT;
100 case CBSTATUS_COARSE_SAMPLE_ORDER_SET:
101 return VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV;
102 default:
103 // CBSTATUS_INDEX_BUFFER_BOUND is not in VkDynamicState
104 return VK_DYNAMIC_STATE_MAX_ENUM;
105 }
106 return VK_DYNAMIC_STATE_MAX_ENUM;
107}
108
109CBStatusFlagBits ConvertToCBStatusFlagBits(VkDynamicState state) {
110 switch (state) {
111 case VK_DYNAMIC_STATE_VIEWPORT:
112 return CBSTATUS_VIEWPORT_SET;
113 case VK_DYNAMIC_STATE_SCISSOR:
114 return CBSTATUS_SCISSOR_SET;
115 case VK_DYNAMIC_STATE_LINE_WIDTH:
116 return CBSTATUS_LINE_WIDTH_SET;
117 case VK_DYNAMIC_STATE_DEPTH_BIAS:
118 return CBSTATUS_DEPTH_BIAS_SET;
119 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
120 return CBSTATUS_BLEND_CONSTANTS_SET;
121 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
122 return CBSTATUS_DEPTH_BOUNDS_SET;
123 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
124 return CBSTATUS_STENCIL_READ_MASK_SET;
125 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
126 return CBSTATUS_STENCIL_WRITE_MASK_SET;
127 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
128 return CBSTATUS_STENCIL_REFERENCE_SET;
129 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
130 return CBSTATUS_VIEWPORT_W_SCALING_SET;
131 case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT:
132 return CBSTATUS_DISCARD_RECTANGLE_SET;
133 case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT:
134 return CBSTATUS_SAMPLE_LOCATIONS_SET;
135 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
136 return CBSTATUS_SHADING_RATE_PALETTE_SET;
137 case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV:
138 return CBSTATUS_COARSE_SAMPLE_ORDER_SET;
139 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
140 return CBSTATUS_EXCLUSIVE_SCISSOR_SET;
141 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
142 return CBSTATUS_LINE_STIPPLE_SET;
143 case VK_DYNAMIC_STATE_CULL_MODE_EXT:
144 return CBSTATUS_CULL_MODE_SET;
145 case VK_DYNAMIC_STATE_FRONT_FACE_EXT:
146 return CBSTATUS_FRONT_FACE_SET;
147 case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT:
148 return CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
149 case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
150 return CBSTATUS_VIEWPORT_WITH_COUNT_SET;
151 case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT:
152 return CBSTATUS_SCISSOR_WITH_COUNT_SET;
153 case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT:
154 return CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
155 case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT:
156 return CBSTATUS_DEPTH_TEST_ENABLE_SET;
157 case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT:
158 return CBSTATUS_DEPTH_WRITE_ENABLE_SET;
159 case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT:
160 return CBSTATUS_DEPTH_COMPARE_OP_SET;
161 case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT:
162 return CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
163 case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT:
164 return CBSTATUS_STENCIL_TEST_ENABLE_SET;
165 case VK_DYNAMIC_STATE_STENCIL_OP_EXT:
166 return CBSTATUS_STENCIL_OP_SET;
167 default:
168 return CBSTATUS_NONE;
169 }
170 return CBSTATUS_NONE;
171}
172
Mark Lobodzinskib4ab6ac2020-04-02 13:12:06 -0600173void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
174 if (add_obj) {
175 instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
176 // Call base class
177 ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
178 }
179}
180
John Zulauf5c5e88d2019-12-26 11:22:02 -0700181uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
182 // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
183 uint32_t mip_level_count = range->levelCount;
184 if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
185 mip_level_count = mip_levels - range->baseMipLevel;
186 }
187 return mip_level_count;
188}
189
190uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
191 // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
192 uint32_t array_layer_count = range->layerCount;
193 if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
194 array_layer_count = layers - range->baseArrayLayer;
195 }
196 return array_layer_count;
197}
198
199VkImageSubresourceRange NormalizeSubresourceRange(const VkImageCreateInfo &image_create_info,
200 const VkImageSubresourceRange &range) {
201 VkImageSubresourceRange norm = range;
202 norm.levelCount = ResolveRemainingLevels(&range, image_create_info.mipLevels);
203
Mike Schuchardt2df08912020-12-15 16:28:09 -0800204 // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT flag bit, where <extent.depth> and
John Zulauf5c5e88d2019-12-26 11:22:02 -0700205 // <arrayLayers> can potentially alias.
Mike Schuchardt2df08912020-12-15 16:28:09 -0800206 uint32_t layer_limit = (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT))
John Zulauf5c5e88d2019-12-26 11:22:02 -0700207 ? image_create_info.extent.depth
208 : image_create_info.arrayLayers;
209 norm.layerCount = ResolveRemainingLayers(&range, layer_limit);
210
211 // For multiplanar formats, IMAGE_ASPECT_COLOR is equivalent to adding the aspect of the individual planes
212 VkImageAspectFlags &aspect_mask = norm.aspectMask;
213 if (FormatIsMultiplane(image_create_info.format)) {
214 if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
215 aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
216 aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
217 if (FormatPlaneCount(image_create_info.format) > 2) {
218 aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
219 }
220 }
221 }
222 return norm;
223}
224
225VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range) {
226 const VkImageCreateInfo &image_create_info = image_state.createInfo;
227 return NormalizeSubresourceRange(image_create_info, range);
228}
229
John Zulauf2bc1fde2020-04-24 15:09:51 -0600230// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
231// attachments won't persist past the API entry point exit.
232std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
233 const FRAMEBUFFER_STATE &fb_state) {
234 const VkImageView *attachments = fb_state.createInfo.pAttachments;
235 uint32_t count = fb_state.createInfo.attachmentCount;
236 if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
237 const auto *framebuffer_attachments = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
238 if (framebuffer_attachments) {
239 attachments = framebuffer_attachments->pAttachments;
240 count = framebuffer_attachments->attachmentCount;
241 }
242 }
243 return std::make_pair(count, attachments);
244}
245
246std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetAttachmentViews(const VkRenderPassBeginInfo &rp_begin,
247 const FRAMEBUFFER_STATE &fb_state) const {
248 std::vector<const IMAGE_VIEW_STATE *> views;
249
250 const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
251 const auto attachment_count = count_attachment.first;
252 const auto *attachments = count_attachment.second;
253 views.resize(attachment_count, nullptr);
254 for (uint32_t i = 0; i < attachment_count; i++) {
255 if (attachments[i] != VK_NULL_HANDLE) {
256 views[i] = Get<IMAGE_VIEW_STATE>(attachments[i]);
257 }
258 }
259 return views;
260}
261
262std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetCurrentAttachmentViews(const CMD_BUFFER_STATE &cb_state) const {
263 // Only valid *after* RecordBeginRenderPass and *before* RecordEndRenderpass as it relies on cb_state for the renderpass info.
264 std::vector<const IMAGE_VIEW_STATE *> views;
265
locke-lunargaecf2152020-05-12 17:15:41 -0600266 const auto *rp_state = cb_state.activeRenderPass.get();
John Zulauf2bc1fde2020-04-24 15:09:51 -0600267 if (!rp_state) return views;
268 const auto &rp_begin = *cb_state.activeRenderPassBeginInfo.ptr();
269 const auto *fb_state = Get<FRAMEBUFFER_STATE>(rp_begin.framebuffer);
270 if (!fb_state) return views;
271
272 return GetAttachmentViews(rp_begin, *fb_state);
273}
274
locke-lunarg3e127c72020-06-09 17:45:28 -0600275PIPELINE_STATE *GetCurrentPipelineFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint) {
locke-lunargb8d7a7a2020-10-25 16:01:52 -0600276 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
277 return cmd.lastBound[lv_bind_point].pipeline_state;
locke-lunarg3e127c72020-06-09 17:45:28 -0600278}
279
280void GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint,
281 const PIPELINE_STATE **rtn_pipe,
282 const std::vector<LAST_BOUND_STATE::PER_SET> **rtn_sets) {
locke-lunargb8d7a7a2020-10-25 16:01:52 -0600283 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
284 const auto &last_bound_it = cmd.lastBound[lv_bind_point];
285 if (!last_bound_it.IsUsing()) {
locke-lunarg3e127c72020-06-09 17:45:28 -0600286 return;
287 }
locke-lunargb8d7a7a2020-10-25 16:01:52 -0600288 *rtn_pipe = last_bound_it.pipeline_state;
289 *rtn_sets = &(last_bound_it.per_set);
locke-lunarg3e127c72020-06-09 17:45:28 -0600290}
291
locke-lunargd556cc32019-09-17 01:21:23 -0600292#ifdef VK_USE_PLATFORM_ANDROID_KHR
293// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
294// This could also move into a seperate core_validation_android.cpp file... ?
295
296void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
297 const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
298 if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
Spencer Fricke224c9852020-04-06 07:47:29 -0700299 is_node->external_ahb = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600300 }
301 const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
302 if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
303 is_node->has_ahb_format = true;
304 is_node->ahb_format = ext_fmt_android->externalFormat;
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700305 // VUID 01894 will catch if not found in map
306 auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
307 if (it != ahb_ext_formats_map.end()) {
308 is_node->format_features = it->second;
309 }
locke-lunargd556cc32019-09-17 01:21:23 -0600310 }
311}
312
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700313void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
314 const VkExternalMemoryBufferCreateInfo *embci = lvl_find_in_chain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
315 if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
316 bs_node->external_ahb = true;
317 }
318}
319
locke-lunargd556cc32019-09-17 01:21:23 -0600320void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700321 VkSamplerYcbcrConversion ycbcr_conversion,
322 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
locke-lunargd556cc32019-09-17 01:21:23 -0600323 const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
324 if (ext_format_android && (0 != ext_format_android->externalFormat)) {
325 ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700326 // VUID 01894 will catch if not found in map
327 auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
328 if (it != ahb_ext_formats_map.end()) {
329 ycbcr_state->format_features = it->second;
330 }
locke-lunargd556cc32019-09-17 01:21:23 -0600331 }
332};
333
334void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
335 ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
336};
337
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700338void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
339 VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
340 if (VK_SUCCESS != result) return;
341 auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
342 if (ahb_format_props) {
343 ahb_ext_formats_map.insert({ahb_format_props->externalFormat, ahb_format_props->formatFeatures});
344 }
345}
346
locke-lunargd556cc32019-09-17 01:21:23 -0600347#else
348
349void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
350
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700351void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}
352
locke-lunargd556cc32019-09-17 01:21:23 -0600353void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700354 VkSamplerYcbcrConversion ycbcr_conversion,
355 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state){};
locke-lunargd556cc32019-09-17 01:21:23 -0600356
357void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
358
359#endif // VK_USE_PLATFORM_ANDROID_KHR
360
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -0600361std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
362 uint32_t set) {
363 std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
364 if (layout_data && (set < layout_data->set_layouts.size())) {
365 dsl = layout_data->set_layouts[set];
366 }
367 return dsl;
368}
369
Petr Kraus44f1c482020-04-25 20:09:25 +0200370void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
371 // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
372 // if format is AHB external format then the features are already set
373 if (image_state.has_ahb_format == false) {
374 const VkImageTiling image_tiling = image_state.createInfo.tiling;
375 const VkFormat image_format = image_state.createInfo.format;
376 if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
377 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
378 VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
379 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image, &drm_format_properties);
380
381 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
382 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
383 nullptr};
384 format_properties_2.pNext = (void *)&drm_properties_list;
385 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Lionel Landwerlin09351a72020-06-22 18:15:59 +0300386 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
387 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
388 drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
389 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Petr Kraus44f1c482020-04-25 20:09:25 +0200390
391 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300392 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
393 drm_format_properties.drmFormatModifier) {
394 image_state.format_features =
Petr Kraus44f1c482020-04-25 20:09:25 +0200395 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300396 break;
Petr Kraus44f1c482020-04-25 20:09:25 +0200397 }
398 }
399 } else {
400 VkFormatProperties format_properties;
401 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
402 image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
403 : format_properties.optimalTilingFeatures;
404 }
405 }
406}
407
// Build and register IMAGE_STATE tracking for a successfully created image:
// AHB/swapchain linkage, memory requirements (per plane for disjoint images),
// format features, and protected-memory status.
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    // Images created from a swapchain remember their source swapchain handle.
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            // Disjoint multi-planar image: each plane has its own memory requirements.
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least first plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    // Cache format feature flags (skipped internally for AHB external formats).
    AddImageStateProps(*is_node, device, physical_device);

    is_node->unprotected = ((pCreateInfo->flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);

    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}
458
// Tear down all tracking for an image about to be destroyed: invalidate recorded
// command buffers that reference it, release memory-range/binding bookkeeping,
// detach it from its swapchain (if any), and drop it from the image map.
void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    // Any command buffer that bound this image is no longer valid to submit.
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image, mem_binding);
    }
    // Swapchain-backed images: remove this image from its swapchain entry's bound set.
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    // Flag the state destroyed so outstanding shared_ptr holders can detect it.
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}
480
481void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
482 VkImageLayout imageLayout, const VkClearColorValue *pColor,
483 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
484 auto cb_node = GetCBState(commandBuffer);
485 auto image_state = GetImageState(image);
486 if (cb_node && image_state) {
487 AddCommandBufferBindingImage(cb_node, image_state);
488 }
489}
490
491void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
492 VkImageLayout imageLayout,
493 const VkClearDepthStencilValue *pDepthStencil,
494 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
495 auto cb_node = GetCBState(commandBuffer);
496 auto image_state = GetImageState(image);
497 if (cb_node && image_state) {
498 AddCommandBufferBindingImage(cb_node, image_state);
499 }
500}
501
502void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
503 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
504 uint32_t regionCount, const VkImageCopy *pRegions) {
505 auto cb_node = GetCBState(commandBuffer);
506 auto src_image_state = GetImageState(srcImage);
507 auto dst_image_state = GetImageState(dstImage);
508
509 // Update bindings between images and cmd buffer
510 AddCommandBufferBindingImage(cb_node, src_image_state);
511 AddCommandBufferBindingImage(cb_node, dst_image_state);
512}
513
Jeff Leger178b1e52020-10-05 12:22:23 -0400514void ValidationStateTracker::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
515 const VkCopyImageInfo2KHR *pCopyImageInfo) {
516 auto cb_node = GetCBState(commandBuffer);
517 auto src_image_state = GetImageState(pCopyImageInfo->srcImage);
518 auto dst_image_state = GetImageState(pCopyImageInfo->dstImage);
519
520 // Update bindings between images and cmd buffer
521 AddCommandBufferBindingImage(cb_node, src_image_state);
522 AddCommandBufferBindingImage(cb_node, dst_image_state);
523}
524
locke-lunargd556cc32019-09-17 01:21:23 -0600525void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
526 VkImageLayout srcImageLayout, VkImage dstImage,
527 VkImageLayout dstImageLayout, uint32_t regionCount,
528 const VkImageResolve *pRegions) {
529 auto cb_node = GetCBState(commandBuffer);
530 auto src_image_state = GetImageState(srcImage);
531 auto dst_image_state = GetImageState(dstImage);
532
533 // Update bindings between images and cmd buffer
534 AddCommandBufferBindingImage(cb_node, src_image_state);
535 AddCommandBufferBindingImage(cb_node, dst_image_state);
536}
537
Jeff Leger178b1e52020-10-05 12:22:23 -0400538void ValidationStateTracker::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
539 const VkResolveImageInfo2KHR *pResolveImageInfo) {
540 auto cb_node = GetCBState(commandBuffer);
541 auto src_image_state = GetImageState(pResolveImageInfo->srcImage);
542 auto dst_image_state = GetImageState(pResolveImageInfo->dstImage);
543
544 // Update bindings between images and cmd buffer
545 AddCommandBufferBindingImage(cb_node, src_image_state);
546 AddCommandBufferBindingImage(cb_node, dst_image_state);
547}
548
locke-lunargd556cc32019-09-17 01:21:23 -0600549void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
550 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
551 uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
552 auto cb_node = GetCBState(commandBuffer);
553 auto src_image_state = GetImageState(srcImage);
554 auto dst_image_state = GetImageState(dstImage);
555
556 // Update bindings between images and cmd buffer
557 AddCommandBufferBindingImage(cb_node, src_image_state);
558 AddCommandBufferBindingImage(cb_node, dst_image_state);
559}
560
Jeff Leger178b1e52020-10-05 12:22:23 -0400561void ValidationStateTracker::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
562 const VkBlitImageInfo2KHR *pBlitImageInfo) {
563 auto cb_node = GetCBState(commandBuffer);
564 auto src_image_state = GetImageState(pBlitImageInfo->srcImage);
565 auto dst_image_state = GetImageState(pBlitImageInfo->dstImage);
566
567 // Update bindings between images and cmd buffer
568 AddCommandBufferBindingImage(cb_node, src_image_state);
569 AddCommandBufferBindingImage(cb_node, dst_image_state);
570}
571
locke-lunargd556cc32019-09-17 01:21:23 -0600572void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
573 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
574 VkResult result) {
575 if (result != VK_SUCCESS) return;
576 // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500577 auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -0600578
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700579 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
580 RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
581 }
locke-lunargd556cc32019-09-17 01:21:23 -0600582 // Get a set of requirements in the case the app does not
sfricke-samsungad90e722020-07-08 20:54:24 -0700583 DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);
locke-lunargd556cc32019-09-17 01:21:23 -0600584
sfricke-samsungedce77a2020-07-03 22:35:13 -0700585 buffer_state->unprotected = ((pCreateInfo->flags & VK_BUFFER_CREATE_PROTECTED_BIT) == 0);
586
locke-lunargd556cc32019-09-17 01:21:23 -0600587 bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
588}
589
590void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
591 const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
592 VkResult result) {
593 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500594 auto buffer_state = GetBufferShared(pCreateInfo->buffer);
locke-lunarg25b6c352020-08-06 17:44:18 -0600595 auto buffer_view_state = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
596
597 VkFormatProperties format_properties;
598 DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);
599 buffer_view_state->format_features = format_properties.bufferFeatures;
600
601 bufferViewMap.insert(std::make_pair(*pView, std::move(buffer_view_state)));
locke-lunargd556cc32019-09-17 01:21:23 -0600602}
603
// Record state for a successfully created VkImageView. Besides building the
// IMAGE_VIEW_STATE entry, this precomputes two things used later in draw-time
// validation: the view's supported format features, and filter-cubic support.
void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);

    // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
    const VkImageTiling image_tiling = image_state->createInfo.tiling;
    const VkFormat image_view_format = pCreateInfo->format;
    if (image_state->has_ahb_format == true) {
        // The ImageView uses same Image's format feature since they share same AHB
        image_view_state->format_features = image_state->format_features;
    } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
        assert(device_extensions.vk_ext_image_drm_format_modifier);
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                      nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        // Features come from the list entry whose modifier matches the image's actual modifier.
        // NOTE(review): pDrmFormatModifierProperties is left null here, so this first query only
        // fills drmFormatModifierCount — presumably the loop relies on the driver-filled array;
        // confirm a second population query isn't expected before indexing.
        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
                image_view_state->format_features |=
                    drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                break;
            }
        }
    } else {
        // Ordinary linear/optimal tiling: pick the matching tiling feature set
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
        image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                     : format_properties.optimalTilingFeatures;
    }

    // filter_cubic_props is used in CmdDraw validation. But it takes a lot of performance if it does in CmdDraw.
    image_view_state->filter_cubic_props = lvl_init_struct<VkFilterCubicImageViewImageFormatPropertiesEXT>();
    if (IsExtEnabled(device_extensions.vk_ext_filter_cubic)) {
        auto imageview_format_info = lvl_init_struct<VkPhysicalDeviceImageViewImageFormatInfoEXT>();
        imageview_format_info.imageViewType = pCreateInfo->viewType;
        auto image_format_info = lvl_init_struct<VkPhysicalDeviceImageFormatInfo2>(&imageview_format_info);
        image_format_info.type = image_state->createInfo.imageType;
        image_format_info.format = image_state->createInfo.format;
        image_format_info.tiling = image_state->createInfo.tiling;
        image_format_info.usage = image_state->createInfo.usage;
        image_format_info.flags = image_state->createInfo.flags;

        // filter_cubic_props is chained into the output struct so the dispatch fills it directly
        auto image_format_properties = lvl_init_struct<VkImageFormatProperties2>(&image_view_state->filter_cubic_props);

        DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &image_format_info, &image_format_properties);
    }
    imageViewMap.insert(std::make_pair(*pView, std::move(image_view_state)));
}
662
663void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
664 uint32_t regionCount, const VkBufferCopy *pRegions) {
665 auto cb_node = GetCBState(commandBuffer);
666 auto src_buffer_state = GetBufferState(srcBuffer);
667 auto dst_buffer_state = GetBufferState(dstBuffer);
668
669 // Update bindings between buffers and cmd buffer
670 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
671 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
672}
673
Jeff Leger178b1e52020-10-05 12:22:23 -0400674void ValidationStateTracker::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
675 const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
676 auto cb_node = GetCBState(commandBuffer);
677 auto src_buffer_state = GetBufferState(pCopyBufferInfos->srcBuffer);
678 auto dst_buffer_state = GetBufferState(pCopyBufferInfos->dstBuffer);
679
680 // Update bindings between buffers and cmd buffer
681 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
682 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
683}
684
locke-lunargd556cc32019-09-17 01:21:23 -0600685void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
686 const VkAllocationCallbacks *pAllocator) {
687 IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
688 if (!image_view_state) return;
689 const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
690
691 // Any bound cmd buffers are now invalid
692 InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500693 image_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600694 imageViewMap.erase(imageView);
695}
696
697void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
698 if (!buffer) return;
699 auto buffer_state = GetBufferState(buffer);
700 const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
701
702 InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
703 for (auto mem_binding : buffer_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700704 RemoveBufferMemoryRange(buffer, mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -0600705 }
706 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500707 buffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600708 bufferMap.erase(buffer_state->buffer);
709}
710
711void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
712 const VkAllocationCallbacks *pAllocator) {
713 if (!bufferView) return;
714 auto buffer_view_state = GetBufferViewState(bufferView);
715 const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
716
717 // Any bound cmd buffers are now invalid
718 InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500719 buffer_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600720 bufferViewMap.erase(bufferView);
721}
722
723void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
724 VkDeviceSize size, uint32_t data) {
725 auto cb_node = GetCBState(commandBuffer);
726 auto buffer_state = GetBufferState(dstBuffer);
727 // Update bindings between buffer and cmd buffer
728 AddCommandBufferBindingBuffer(cb_node, buffer_state);
729}
730
731void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
732 VkImageLayout srcImageLayout, VkBuffer dstBuffer,
733 uint32_t regionCount, const VkBufferImageCopy *pRegions) {
734 auto cb_node = GetCBState(commandBuffer);
735 auto src_image_state = GetImageState(srcImage);
736 auto dst_buffer_state = GetBufferState(dstBuffer);
737
738 // Update bindings between buffer/image and cmd buffer
739 AddCommandBufferBindingImage(cb_node, src_image_state);
740 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
741}
742
Jeff Leger178b1e52020-10-05 12:22:23 -0400743void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
744 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
745 auto cb_node = GetCBState(commandBuffer);
746 auto src_image_state = GetImageState(pCopyImageToBufferInfo->srcImage);
747 auto dst_buffer_state = GetBufferState(pCopyImageToBufferInfo->dstBuffer);
748
749 // Update bindings between buffer/image and cmd buffer
750 AddCommandBufferBindingImage(cb_node, src_image_state);
751 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
752}
753
locke-lunargd556cc32019-09-17 01:21:23 -0600754void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
755 VkImageLayout dstImageLayout, uint32_t regionCount,
756 const VkBufferImageCopy *pRegions) {
757 auto cb_node = GetCBState(commandBuffer);
758 auto src_buffer_state = GetBufferState(srcBuffer);
759 auto dst_image_state = GetImageState(dstImage);
760
761 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
762 AddCommandBufferBindingImage(cb_node, dst_image_state);
763}
764
Jeff Leger178b1e52020-10-05 12:22:23 -0400765void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
766 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
767 auto cb_node = GetCBState(commandBuffer);
768 auto src_buffer_state = GetBufferState(pCopyBufferToImageInfo->srcBuffer);
769 auto dst_image_state = GetImageState(pCopyBufferToImageInfo->dstImage);
770
771 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
772 AddCommandBufferBindingImage(cb_node, dst_image_state);
773}
774
locke-lunargd556cc32019-09-17 01:21:23 -0600775// Get the image viewstate for a given framebuffer attachment
locke-lunargfc78e932020-11-19 17:06:24 -0700776IMAGE_VIEW_STATE *ValidationStateTracker::GetActiveAttachmentImageViewState(const CMD_BUFFER_STATE *cb, uint32_t index,
777 const CMD_BUFFER_STATE *primary_cb) {
778 if (primary_cb) {
779 assert(primary_cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < primary_cb->active_attachments->size()));
780 return primary_cb->active_attachments->at(index);
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300781 }
locke-lunargfc78e932020-11-19 17:06:24 -0700782 assert(cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < cb->active_attachments->size()));
783 return cb->active_attachments->at(index);
locke-lunargd556cc32019-09-17 01:21:23 -0600784}
785
786// Get the image viewstate for a given framebuffer attachment
locke-lunargfc78e932020-11-19 17:06:24 -0700787const IMAGE_VIEW_STATE *ValidationStateTracker::GetActiveAttachmentImageViewState(const CMD_BUFFER_STATE *cb, uint32_t index,
788 const CMD_BUFFER_STATE *primary_cb) const {
789 if (primary_cb) {
790 assert(primary_cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < primary_cb->active_attachments->size()));
791 return primary_cb->active_attachments->at(index);
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300792 }
locke-lunargfc78e932020-11-19 17:06:24 -0700793 assert(cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < cb->active_attachments->size()));
794 return cb->active_attachments->at(index);
locke-lunargd556cc32019-09-17 01:21:23 -0600795}
796
797void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
locke-lunargd556cc32019-09-17 01:21:23 -0600798 std::unordered_set<VkImage> *bound_images = nullptr;
799
locke-lunargb3584732019-10-28 20:18:36 -0600800 if (image_state->bind_swapchain) {
801 auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -0600802 if (swapchain_state) {
locke-lunargb3584732019-10-28 20:18:36 -0600803 bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
locke-lunargd556cc32019-09-17 01:21:23 -0600804 }
805 } else {
locke-lunargcf04d582019-11-26 00:31:50 -0700806 if (image_state->binding.mem_state) {
807 bound_images = &image_state->binding.mem_state->bound_images;
locke-lunargd556cc32019-09-17 01:21:23 -0600808 }
809 }
810
811 if (bound_images) {
812 for (const auto &handle : *bound_images) {
813 if (handle != image_state->image) {
814 auto is = GetImageState(handle);
815 if (is && is->IsCompatibleAliasing(image_state)) {
816 auto inserted = is->aliasing_images.emplace(image_state->image);
817 if (inserted.second) {
818 image_state->aliasing_images.emplace(handle);
819 }
820 }
821 }
822 }
823 }
824}
825
826void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
827 for (const auto &image : image_state->aliasing_images) {
828 auto is = GetImageState(image);
829 if (is) {
830 is->aliasing_images.erase(image_state->image);
831 }
832 }
833 image_state->aliasing_images.clear();
834}
835
836void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
837 // This is one way clear. Because the bound_images include cross references, the one way clear loop could clear the whole
838 // reference. It doesn't need two ways clear.
839 for (const auto &handle : bound_images) {
840 auto is = GetImageState(handle);
841 if (is) {
842 is->aliasing_images.clear();
843 }
844 }
845}
846
Jeff Bolz310775c2019-10-09 00:46:33 -0500847const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
848 auto it = eventMap.find(event);
849 if (it == eventMap.end()) {
850 return nullptr;
851 }
852 return &it->second;
853}
854
locke-lunargd556cc32019-09-17 01:21:23 -0600855EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
856 auto it = eventMap.find(event);
857 if (it == eventMap.end()) {
858 return nullptr;
859 }
860 return &it->second;
861}
862
863const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
864 auto it = queueMap.find(queue);
865 if (it == queueMap.cend()) {
866 return nullptr;
867 }
868 return &it->second;
869}
870
871QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
872 auto it = queueMap.find(queue);
873 if (it == queueMap.end()) {
874 return nullptr;
875 }
876 return &it->second;
877}
878
879const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
880 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
881 auto it = phys_dev_map->find(phys);
882 if (it == phys_dev_map->end()) {
883 return nullptr;
884 }
885 return &it->second;
886}
887
888PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
889 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
890 auto it = phys_dev_map->find(phys);
891 if (it == phys_dev_map->end()) {
892 return nullptr;
893 }
894 return &it->second;
895}
896
// Accessors for the physical device this device-level tracker was created from.
PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
899
// Return ptr to memory binding for given handle of specified type
// Template shared by the const and non-const GetObjectMemBinding wrappers below:
// dispatches on the handle type to the matching state-lookup member function.
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureStateNV(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    // Handle type is not a memory-bindable object
    return nullptr;
}
915
// Const lookup of the BINDABLE (memory-backed) state for an image/buffer/NV acceleration structure handle.
const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}
919
// Mutable lookup of the BINDABLE (memory-backed) state for an image/buffer/NV acceleration structure handle.
BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}
923
// Create and populate DEVICE_MEMORY_STATE for a newly allocated VkDeviceMemory,
// decoding the pNext chain for dedicated/export/import/multi-instance properties.
void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    // Fake address gives each allocation a synthetic, non-overlapping address range
    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo, fake_address);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfo>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }

    auto alloc_flags = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
    if (alloc_flags) {
        auto dev_mask = alloc_flags->deviceMask;
        // Multi-instance when more than one bit is set in deviceMask
        if ((dev_mask != 0) && (dev_mask & (dev_mask - 1))) {
            mem_info->multi_instance = true;
        }
    }
    // Also multi-instance when the backing heap is a multi-instance heap on a multi-GPU device
    auto heap_index = phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].heapIndex;
    mem_info->multi_instance |= (((phys_dev_mem_props.memoryHeaps[heap_index].flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT) != 0) &&
                                 physical_device_count > 1);

    // Assumes validation already for only a single import operation in the pNext
#ifdef VK_USE_PLATFORM_WIN32_KHR
    auto win32_import = lvl_find_in_chain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
    if (win32_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = win32_import->handleType;
    }
#endif
    auto fd_import = lvl_find_in_chain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
    if (fd_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = fd_import->handleType;
    }
    auto host_pointer_import = lvl_find_in_chain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
    if (host_pointer_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = host_pointer_import->handleType;
    }
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    // AHB Import doesn't have handle in the pNext struct
    // It should be assumed that all imported AHB can only have the same, single handleType
    auto ahb_import = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
    if ((ahb_import) && (ahb_import->buffer != nullptr)) {
        mem_info->is_import_ahb = true;
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
#endif  // VK_USE_PLATFORM_ANDROID_KHR

    // Protected memory comes from a memory type with the PROTECTED property bit
    const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    mem_info->unprotected = ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) == 0);
}
986
987// Create binding link between given sampler and command buffer node
988void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600989 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600990 return;
991 }
Jeff Bolzadbfa852019-10-04 13:53:30 -0500992 AddCommandBufferBinding(sampler_state->cb_bindings,
993 VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600994}
995
996// Create binding link between given image node and command buffer node
997void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600998 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600999 return;
1000 }
1001 // Skip validation if this image was created through WSI
1002 if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
1003 // First update cb binding for image
Jeff Bolzadbfa852019-10-04 13:53:30 -05001004 if (AddCommandBufferBinding(image_state->cb_bindings,
1005 VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001006 // Now update CB binding in MemObj mini CB list
1007 for (auto mem_binding : image_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07001008 // Now update CBInfo's Mem reference list
1009 AddCommandBufferBinding(mem_binding->cb_bindings,
1010 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -06001011 }
1012 }
1013 }
1014}
1015
1016// Create binding link between given image view node and its image with command buffer node
1017void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001018 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001019 return;
1020 }
1021 // First add bindings for imageView
Jeff Bolzadbfa852019-10-04 13:53:30 -05001022 if (AddCommandBufferBinding(view_state->cb_bindings,
1023 VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001024 // Only need to continue if this is a new item
Jeff Bolzadbfa852019-10-04 13:53:30 -05001025 auto image_state = view_state->image_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06001026 // Add bindings for image within imageView
1027 if (image_state) {
1028 AddCommandBufferBindingImage(cb_node, image_state);
1029 }
1030 }
1031}
1032
1033// Create binding link between given buffer node and command buffer node
1034void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001035 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001036 return;
1037 }
1038 // First update cb binding for buffer
Jeff Bolzadbfa852019-10-04 13:53:30 -05001039 if (AddCommandBufferBinding(buffer_state->cb_bindings,
1040 VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001041 // Now update CB binding in MemObj mini CB list
1042 for (auto mem_binding : buffer_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07001043 // Now update CBInfo's Mem reference list
1044 AddCommandBufferBinding(mem_binding->cb_bindings,
1045 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -06001046 }
1047 }
1048}
1049
1050// Create binding link between given buffer view node and its buffer with command buffer node
1051void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001052 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001053 return;
1054 }
1055 // First add bindings for bufferView
Jeff Bolzadbfa852019-10-04 13:53:30 -05001056 if (AddCommandBufferBinding(view_state->cb_bindings,
1057 VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
1058 auto buffer_state = view_state->buffer_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06001059 // Add bindings for buffer within bufferView
1060 if (buffer_state) {
1061 AddCommandBufferBindingBuffer(cb_node, buffer_state);
1062 }
1063 }
1064}
1065
1066// Create binding link between given acceleration structure and command buffer node
1067void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
1068 ACCELERATION_STRUCTURE_STATE *as_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001069 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001070 return;
1071 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001072 if (AddCommandBufferBinding(
1073 as_state->cb_bindings,
1074 VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001075 // Now update CB binding in MemObj mini CB list
1076 for (auto mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07001077 // Now update CBInfo's Mem reference list
1078 AddCommandBufferBinding(mem_binding->cb_bindings,
1079 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -06001080 }
1081 }
1082}
1083
sourav parmarcd5fb182020-07-17 12:58:44 -07001084// Create binding link between given acceleration structure and command buffer node
1085void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
1086 ACCELERATION_STRUCTURE_STATE_KHR *as_state) {
1087 if (disabled[command_buffer_state]) {
1088 return;
1089 }
1090 if (AddCommandBufferBinding(
1091 as_state->cb_bindings,
1092 VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureKHR, as_state), cb_node)) {
1093 // Now update CB binding in MemObj mini CB list
1094 for (auto mem_binding : as_state->GetBoundMemory()) {
1095 // Now update CBInfo's Mem reference list
1096 AddCommandBufferBinding(mem_binding->cb_bindings,
1097 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
1098 }
1099 }
1100}
1101
locke-lunargd556cc32019-09-17 01:21:23 -06001102// Clear a single object binding from given memory object
locke-lunarg5f59e782019-12-19 10:32:23 -07001103void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
locke-lunargd556cc32019-09-17 01:21:23 -06001104 // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
1105 if (mem_info) {
1106 mem_info->obj_bindings.erase(typed_handle);
1107 }
1108}
1109
1110// ClearMemoryObjectBindings clears the binding of objects to memory
1111// For the given object it pulls the memory bindings and makes sure that the bindings
1112// no longer refer to the object being cleared. This occurs when objects are destroyed.
1113void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
1114 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
1115 if (mem_binding) {
1116 if (!mem_binding->sparse) {
locke-lunarg5f59e782019-12-19 10:32:23 -07001117 ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06001118 } else { // Sparse, clear all bindings
1119 for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
locke-lunarg5f59e782019-12-19 10:32:23 -07001120 ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06001121 }
1122 }
1123 }
1124}
1125
// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        // Binding keeps a shared_ptr to the memory state so it survives until all bound objects are gone
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            mem_binding->binding.size = mem_binding->requirements.size;
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                // NOTE(review): downcast relies on IMAGE_STATE deriving from BINDABLE with a compatible layout
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}
1153
1154// For NULL mem case, clear any previous binding Else...
1155// Make sure given object is in its object map
1156// IF a previous binding existed, update binding
1157// Add reference from objectInfo to memoryInfo
1158// Add reference off of object's binding info
1159// Return VK_TRUE if addition is successful, VK_FALSE otherwise
locke-lunargcf04d582019-11-26 00:31:50 -07001160bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
1161 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
locke-lunargd556cc32019-09-17 01:21:23 -06001162 bool skip = VK_FALSE;
1163 // Handle NULL case separately, just clear previous binding & decrement reference
locke-lunargcf04d582019-11-26 00:31:50 -07001164 if (mem == VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06001165 // TODO : This should cause the range of the resource to be unbound according to spec
1166 } else {
1167 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
1168 assert(mem_binding);
1169 if (mem_binding) { // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
1170 assert(mem_binding->sparse);
locke-lunargcf04d582019-11-26 00:31:50 -07001171 MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
1172 if (binding.mem_state) {
1173 binding.mem_state->obj_bindings.insert(typed_handle);
locke-lunargd556cc32019-09-17 01:21:23 -06001174 // Need to set mem binding for this object
1175 mem_binding->sparse_bindings.insert(binding);
1176 mem_binding->UpdateBoundMemorySet();
1177 }
1178 }
1179 }
1180 return skip;
1181}
1182
locke-lunarg540b2252020-08-03 13:23:36 -06001183void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
1184 const char *function) {
locke-lunargb8d7a7a2020-10-25 16:01:52 -06001185 const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
1186 auto &state = cb_state->lastBound[lv_bind_point];
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001187 PIPELINE_STATE *pipe = state.pipeline_state;
locke-lunargd556cc32019-09-17 01:21:23 -06001188 if (VK_NULL_HANDLE != state.pipeline_layout) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001189 for (const auto &set_binding_pair : pipe->active_slots) {
1190 uint32_t set_index = set_binding_pair.first;
locke-lunargd556cc32019-09-17 01:21:23 -06001191 // Pull the set node
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001192 cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[set_index].bound_descriptor_set;
locke-lunargd556cc32019-09-17 01:21:23 -06001193
Tony-LunarG77822802020-05-28 16:35:46 -06001194 // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding
locke-lunargd556cc32019-09-17 01:21:23 -06001195
Tony-LunarG77822802020-05-28 16:35:46 -06001196 // TODO: If recreating the reduced_map here shows up in profilinging, need to find a way of sharing with the
1197 // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
1198 cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001199 const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pipe);
Tony-LunarG77822802020-05-28 16:35:46 -06001200
1201 if (reduced_map.IsManyDescriptors()) {
1202 // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001203 descriptor_set->UpdateValidationCache(*cb_state, *pipe, binding_req_map);
Tony-LunarG77822802020-05-28 16:35:46 -06001204 }
1205
1206 // We can skip updating the state if "nothing" has changed since the last validation.
1207 // See CoreChecks::ValidateCmdBufDrawState for more details.
1208 bool descriptor_set_changed =
1209 !reduced_map.IsManyDescriptors() ||
1210 // Update if descriptor set (or contents) has changed
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001211 state.per_set[set_index].validated_set != descriptor_set ||
1212 state.per_set[set_index].validated_set_change_count != descriptor_set->GetChangeCount() ||
Tony-LunarG77822802020-05-28 16:35:46 -06001213 (!disabled[image_layout_validation] &&
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001214 state.per_set[set_index].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
Tony-LunarG77822802020-05-28 16:35:46 -06001215 bool need_update = descriptor_set_changed ||
1216 // Update if previous bindingReqMap doesn't include new bindingReqMap
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001217 !std::includes(state.per_set[set_index].validated_set_binding_req_map.begin(),
1218 state.per_set[set_index].validated_set_binding_req_map.end(), binding_req_map.begin(),
Tony-LunarG77822802020-05-28 16:35:46 -06001219 binding_req_map.end());
1220
1221 if (need_update) {
1222 // Bind this set and its active descriptor resources to the command buffer
1223 if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
1224 // Only record the bindings that haven't already been recorded
1225 BindingReqMap delta_reqs;
1226 std::set_difference(binding_req_map.begin(), binding_req_map.end(),
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001227 state.per_set[set_index].validated_set_binding_req_map.begin(),
1228 state.per_set[set_index].validated_set_binding_req_map.end(),
Tony-LunarG77822802020-05-28 16:35:46 -06001229 std::inserter(delta_reqs, delta_reqs.begin()));
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001230 descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, delta_reqs, function);
Tony-LunarG77822802020-05-28 16:35:46 -06001231 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001232 descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, binding_req_map, function);
locke-lunargd556cc32019-09-17 01:21:23 -06001233 }
1234
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001235 state.per_set[set_index].validated_set = descriptor_set;
1236 state.per_set[set_index].validated_set_change_count = descriptor_set->GetChangeCount();
1237 state.per_set[set_index].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
Tony-LunarG77822802020-05-28 16:35:46 -06001238 if (reduced_map.IsManyDescriptors()) {
1239 // Check whether old == new before assigning, the equality check is much cheaper than
1240 // freeing and reallocating the map.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001241 if (state.per_set[set_index].validated_set_binding_req_map != set_binding_pair.second) {
1242 state.per_set[set_index].validated_set_binding_req_map = set_binding_pair.second;
Jeff Bolz56308942019-10-06 22:05:23 -05001243 }
Tony-LunarG77822802020-05-28 16:35:46 -06001244 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001245 state.per_set[set_index].validated_set_binding_req_map = BindingReqMap();
locke-lunargd556cc32019-09-17 01:21:23 -06001246 }
1247 }
1248 }
1249 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001250 if (!pipe->vertex_binding_descriptions_.empty()) {
locke-lunargd556cc32019-09-17 01:21:23 -06001251 cb_state->vertex_buffer_used = true;
1252 }
1253}
1254
1255// Remove set from setMap and delete the set
1256void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001257 descriptor_set->destroyed = true;
Jeff Bolzadbfa852019-10-04 13:53:30 -05001258 const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001259 // Any bound cmd buffers are now invalid
Jeff Bolzadbfa852019-10-04 13:53:30 -05001260 InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001261
locke-lunargd556cc32019-09-17 01:21:23 -06001262 setMap.erase(descriptor_set->GetSet());
1263}
1264
1265// Free all DS Pools including their Sets & related sub-structs
1266// NOTE : Calls to this function should be wrapped in mutex
1267void ValidationStateTracker::DeleteDescriptorSetPools() {
1268 for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
1269 // Remove this pools' sets from setMap and delete them
1270 for (auto ds : ii->second->sets) {
1271 FreeDescriptorSet(ds);
1272 }
1273 ii->second->sets.clear();
1274 ii = descriptorPoolMap.erase(ii);
1275 }
1276}
1277
1278// For given object struct return a ptr of BASE_NODE type for its wrapping struct
1279BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001280 if (object_struct.node) {
1281#ifdef _DEBUG
1282 // assert that lookup would find the same object
1283 VulkanTypedHandle other = object_struct;
1284 other.node = nullptr;
1285 assert(object_struct.node == GetStateStructPtrFromObject(other));
1286#endif
1287 return object_struct.node;
1288 }
locke-lunargd556cc32019-09-17 01:21:23 -06001289 BASE_NODE *base_ptr = nullptr;
1290 switch (object_struct.type) {
1291 case kVulkanObjectTypeDescriptorSet: {
1292 base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
1293 break;
1294 }
1295 case kVulkanObjectTypeSampler: {
1296 base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
1297 break;
1298 }
1299 case kVulkanObjectTypeQueryPool: {
1300 base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
1301 break;
1302 }
1303 case kVulkanObjectTypePipeline: {
1304 base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
1305 break;
1306 }
1307 case kVulkanObjectTypeBuffer: {
1308 base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
1309 break;
1310 }
1311 case kVulkanObjectTypeBufferView: {
1312 base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
1313 break;
1314 }
1315 case kVulkanObjectTypeImage: {
1316 base_ptr = GetImageState(object_struct.Cast<VkImage>());
1317 break;
1318 }
1319 case kVulkanObjectTypeImageView: {
1320 base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
1321 break;
1322 }
1323 case kVulkanObjectTypeEvent: {
1324 base_ptr = GetEventState(object_struct.Cast<VkEvent>());
1325 break;
1326 }
1327 case kVulkanObjectTypeDescriptorPool: {
1328 base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
1329 break;
1330 }
1331 case kVulkanObjectTypeCommandPool: {
1332 base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
1333 break;
1334 }
1335 case kVulkanObjectTypeFramebuffer: {
1336 base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
1337 break;
1338 }
1339 case kVulkanObjectTypeRenderPass: {
1340 base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
1341 break;
1342 }
1343 case kVulkanObjectTypeDeviceMemory: {
1344 base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
1345 break;
1346 }
1347 case kVulkanObjectTypeAccelerationStructureNV: {
sourav parmarcd5fb182020-07-17 12:58:44 -07001348 base_ptr = GetAccelerationStructureStateNV(object_struct.Cast<VkAccelerationStructureNV>());
1349 break;
1350 }
1351 case kVulkanObjectTypeAccelerationStructureKHR: {
1352 base_ptr = GetAccelerationStructureStateKHR(object_struct.Cast<VkAccelerationStructureKHR>());
locke-lunargd556cc32019-09-17 01:21:23 -06001353 break;
1354 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001355 case kVulkanObjectTypeUnknown:
1356 // This can happen if an element of the object_bindings vector has been
1357 // zeroed out, after an object is destroyed.
1358 break;
locke-lunargd556cc32019-09-17 01:21:23 -06001359 default:
1360 // TODO : Any other objects to be handled here?
1361 assert(0);
1362 break;
1363 }
1364 return base_ptr;
1365}
1366
sfricke-samsungbf1a2ed2020-06-14 23:31:00 -07001367// Gets union of all features defined by Potential Format Features
1368// except, does not handle the external format case for AHB as that only can be used for sampled images
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001369VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
1370 VkFormatFeatureFlags format_features = 0;
1371
1372 if (format != VK_FORMAT_UNDEFINED) {
1373 VkFormatProperties format_properties;
1374 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
1375 format_features |= format_properties.linearTilingFeatures;
1376 format_features |= format_properties.optimalTilingFeatures;
1377 if (device_extensions.vk_ext_image_drm_format_modifier) {
1378 // VK_KHR_get_physical_device_properties2 is required in this case
1379 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
1380 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
1381 nullptr};
1382 format_properties_2.pNext = (void *)&drm_properties_list;
1383 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
1384 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
1385 format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
1386 }
1387 }
1388 }
1389
1390 return format_features;
1391}
1392
locke-lunargd556cc32019-09-17 01:21:23 -06001393// Tie the VulkanTypedHandle to the cmd buffer which includes:
1394// Add object_binding to cmd buffer
1395// Add cb_binding to object
Jeff Bolzadbfa852019-10-04 13:53:30 -05001396bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
locke-lunargd556cc32019-09-17 01:21:23 -06001397 const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001398 if (disabled[command_buffer_state]) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001399 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001400 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001401 // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
1402 // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
1403 auto inserted = cb_bindings.insert({cb_node, -1});
1404 if (inserted.second) {
1405 cb_node->object_bindings.push_back(obj);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001406 inserted.first->second = static_cast<int>(cb_node->object_bindings.size()) - 1;
Jeff Bolzadbfa852019-10-04 13:53:30 -05001407 return true;
1408 }
1409 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001410}
1411
1412// For a given object, if cb_node is in that objects cb_bindings, remove cb_node
1413void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
1414 BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
1415 if (base_obj) base_obj->cb_bindings.erase(cb_node);
1416}
1417
// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
// Phases: (1) scalar/flag reset, (2) per-bind-point and render-pass state, (3) break the
// bidirectional links to other command buffers, framebuffers and bound objects, (4) misc
// per-layer bookkeeping, then fire the reset callback (even if cb has no tracked state).
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *cb_state = GetCBState(cb);
    if (cb_state) {
        cb_state->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        cb_state->commandBuffer = cb;
        memset(&cb_state->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&cb_state->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        cb_state->hasDrawCmd = false;
        cb_state->hasTraceRaysCmd = false;
        cb_state->hasBuildAccelerationStructureCmd = false;
        cb_state->hasDispatchCmd = false;
        cb_state->state = CB_NEW;
        cb_state->commandCount = 0;
        cb_state->submitCount = 0;
        cb_state->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        cb_state->status = 0;
        cb_state->static_status = 0;
        cb_state->viewportMask = 0;
        cb_state->viewportWithCountMask = 0;
        cb_state->viewportWithCountCount = 0;
        cb_state->scissorMask = 0;
        cb_state->scissorWithCountMask = 0;
        cb_state->primitiveTopology = VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;

        // Clear pipeline/descriptor binding state for every bind point
        for (auto &item : cb_state->lastBound) {
            item.reset();
        }

        // Render pass / subpass / attachment state
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        cb_state->activeRenderPass = nullptr;
        cb_state->active_attachments = nullptr;
        cb_state->active_subpasses = nullptr;
        cb_state->attachments_view_states.clear();
        cb_state->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        cb_state->activeSubpass = 0;
        cb_state->broken_bindings.clear();
        cb_state->waitedEvents.clear();
        cb_state->events.clear();
        cb_state->writeEventsBeforeWait.clear();
        cb_state->activeQueries.clear();
        cb_state->startedQueries.clear();
        cb_state->image_layout_map.clear();
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        cb_state->vertex_buffer_used = false;
        cb_state->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_state->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto sub_cb : cb_state->linkedCommandBuffers) {
            sub_cb->linkedCommandBuffers.erase(cb_state);
        }
        cb_state->linkedCommandBuffers.clear();
        cb_state->queue_submit_functions.clear();
        cb_state->cmd_execute_commands_functions.clear();
        cb_state->eventUpdates.clear();
        cb_state->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : cb_state->object_bindings) {
            RemoveCommandBufferBinding(obj, cb_state);
        }
        cb_state->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : cb_state->framebuffers) {
            framebuffer->cb_bindings.erase(cb_state);
        }
        cb_state->framebuffers.clear();
        cb_state->activeFramebuffer = VK_NULL_HANDLE;
        cb_state->index_buffer_binding.reset();

        // Queue-family-ownership-transfer barrier bookkeeping
        cb_state->qfo_transfer_image_barriers.Reset();
        cb_state->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
        cb_state->debug_label.Reset();
        cb_state->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        cb_state->small_indexed_draw_call_count = 0;

        cb_state->transform_feedback_active = false;
    }
    // Notify any registered listener (e.g. GPU-AV) even when cb_state lookup failed
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}
1511
1512void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
1513 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
1514 VkResult result) {
1515 if (VK_SUCCESS != result) return;
1516
1517 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
1518 if (nullptr == enabled_features_found) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001519 const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001520 if (features2) {
1521 enabled_features_found = &(features2->features);
1522 }
1523 }
1524
1525 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
1526 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
1527 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
1528
1529 if (nullptr == enabled_features_found) {
1530 state_tracker->enabled_features.core = {};
1531 } else {
1532 state_tracker->enabled_features.core = *enabled_features_found;
1533 }
1534
1535 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
1536 // previously set them through an explicit API call.
1537 uint32_t count;
1538 auto pd_state = GetPhysicalDeviceState(gpu);
1539 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
1540 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
1541 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
1542 // Save local link to this device's physical device state
1543 state_tracker->physical_device_state = pd_state;
1544
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001545 const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
1546 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001547 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001548 } else {
sfricke-samsung27c70722020-05-02 08:42:39 -07001549 // Set Extension Feature Aliases to false as there is no struct to check
1550 state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
1551 state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
1552 state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
1553 state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
1554 state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
1555 state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001556 state_tracker->enabled_features.core12.subgroupBroadcastDynamicId = VK_FALSE;
sfricke-samsung27c70722020-05-02 08:42:39 -07001557
1558 // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001559
1560 const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
1561 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001562 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1563 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1564 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1565 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001566 }
1567
1568 const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
1569 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001570 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1571 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001572 }
1573
1574 const auto *descriptor_indexing_features =
1575 lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
1576 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001577 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1578 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1579 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1580 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1581 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1582 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1583 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1584 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1585 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1586 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1587 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1588 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1589 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1590 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1591 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1592 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1593 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1594 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1595 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1596 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1597 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1598 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1599 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1600 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1601 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1602 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1603 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1604 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
1605 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
1606 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1607 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1608 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1609 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1610 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1611 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1612 descriptor_indexing_features->descriptorBindingPartiallyBound;
1613 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1614 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1615 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001616 }
1617
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001618 const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001619 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001620 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001621 }
1622
1623 const auto *imageless_framebuffer_features =
1624 lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
1625 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001626 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001627 }
1628
1629 const auto *uniform_buffer_standard_layout_features =
1630 lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
1631 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001632 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1633 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001634 }
1635
1636 const auto *subgroup_extended_types_features =
1637 lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
1638 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001639 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1640 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001641 }
1642
1643 const auto *separate_depth_stencil_layouts_features =
1644 lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
1645 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001646 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1647 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001648 }
1649
1650 const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
1651 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001652 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001653 }
1654
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001655 const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001656 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001657 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001658 }
1659
1660 const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
1661 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001662 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1663 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1664 buffer_device_address->bufferDeviceAddressCaptureReplay;
1665 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1666 buffer_device_address->bufferDeviceAddressMultiDevice;
1667 }
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001668
1669 const auto *atomic_int64_features = lvl_find_in_chain<VkPhysicalDeviceShaderAtomicInt64Features>(pCreateInfo->pNext);
1670 if (atomic_int64_features) {
1671 state_tracker->enabled_features.core12.shaderBufferInt64Atomics = atomic_int64_features->shaderBufferInt64Atomics;
1672 state_tracker->enabled_features.core12.shaderSharedInt64Atomics = atomic_int64_features->shaderSharedInt64Atomics;
1673 }
1674
1675 const auto *memory_model_features = lvl_find_in_chain<VkPhysicalDeviceVulkanMemoryModelFeatures>(pCreateInfo->pNext);
1676 if (memory_model_features) {
1677 state_tracker->enabled_features.core12.vulkanMemoryModel = memory_model_features->vulkanMemoryModel;
1678 state_tracker->enabled_features.core12.vulkanMemoryModelDeviceScope =
1679 memory_model_features->vulkanMemoryModelDeviceScope;
1680 state_tracker->enabled_features.core12.vulkanMemoryModelAvailabilityVisibilityChains =
1681 memory_model_features->vulkanMemoryModelAvailabilityVisibilityChains;
1682 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001683 }
1684
1685 const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
1686 if (vulkan_11_features) {
1687 state_tracker->enabled_features.core11 = *vulkan_11_features;
1688 } else {
1689 // These structs are only allowed in pNext chain if there is no kPhysicalDeviceVulkan11Features
1690
1691 const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
1692 if (sixteen_bit_storage_features) {
1693 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1694 sixteen_bit_storage_features->storageBuffer16BitAccess;
1695 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1696 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1697 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1698 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1699 }
1700
1701 const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
1702 if (multiview_features) {
1703 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1704 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1705 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1706 }
1707
1708 const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
1709 if (variable_pointers_features) {
1710 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1711 variable_pointers_features->variablePointersStorageBuffer;
1712 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1713 }
1714
1715 const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
1716 if (protected_memory_features) {
1717 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1718 }
1719
1720 const auto *ycbcr_conversion_features =
1721 lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
1722 if (ycbcr_conversion_features) {
1723 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1724 }
1725
1726 const auto *shader_draw_parameters_features =
1727 lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
1728 if (shader_draw_parameters_features) {
1729 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001730 }
1731 }
1732
locke-lunargd556cc32019-09-17 01:21:23 -06001733 const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001734 if (device_group_ci) {
1735 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1736 state_tracker->device_group_create_info = *device_group_ci;
1737 } else {
1738 state_tracker->physical_device_count = 1;
1739 }
locke-lunargd556cc32019-09-17 01:21:23 -06001740
locke-lunargd556cc32019-09-17 01:21:23 -06001741 const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
1742 if (exclusive_scissor_features) {
1743 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1744 }
1745
1746 const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
1747 if (shading_rate_image_features) {
1748 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1749 }
1750
1751 const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
1752 if (mesh_shader_features) {
1753 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1754 }
1755
1756 const auto *inline_uniform_block_features =
1757 lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
1758 if (inline_uniform_block_features) {
1759 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1760 }
1761
1762 const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
1763 if (transform_feedback_features) {
1764 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1765 }
1766
locke-lunargd556cc32019-09-17 01:21:23 -06001767 const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
1768 if (vtx_attrib_div_features) {
1769 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1770 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001771
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001772 const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
1773 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001774 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001775 }
1776
1777 const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
1778 if (cooperative_matrix_features) {
1779 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1780 }
1781
locke-lunargd556cc32019-09-17 01:21:23 -06001782 const auto *compute_shader_derivatives_features =
1783 lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
1784 if (compute_shader_derivatives_features) {
1785 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1786 }
1787
1788 const auto *fragment_shader_barycentric_features =
1789 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
1790 if (fragment_shader_barycentric_features) {
1791 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1792 }
1793
1794 const auto *shader_image_footprint_features =
1795 lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
1796 if (shader_image_footprint_features) {
1797 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1798 }
1799
1800 const auto *fragment_shader_interlock_features =
1801 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
1802 if (fragment_shader_interlock_features) {
1803 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1804 }
1805
1806 const auto *demote_to_helper_invocation_features =
1807 lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
1808 if (demote_to_helper_invocation_features) {
1809 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1810 }
1811
1812 const auto *texel_buffer_alignment_features =
1813 lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
1814 if (texel_buffer_alignment_features) {
1815 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1816 }
1817
locke-lunargd556cc32019-09-17 01:21:23 -06001818 const auto *pipeline_exe_props_features =
1819 lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
1820 if (pipeline_exe_props_features) {
1821 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1822 }
1823
Jeff Bolz82f854d2019-09-17 14:56:47 -05001824 const auto *dedicated_allocation_image_aliasing_features =
1825 lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
1826 if (dedicated_allocation_image_aliasing_features) {
1827 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1828 *dedicated_allocation_image_aliasing_features;
1829 }
1830
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001831 const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
1832 if (performance_query_features) {
1833 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1834 }
1835
Tobias Hector782bcde2019-11-28 16:19:42 +00001836 const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
1837 if (device_coherent_memory_features) {
1838 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1839 }
1840
sfricke-samsungcead0802020-01-30 22:20:10 -08001841 const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
1842 if (ycbcr_image_array_features) {
1843 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1844 }
1845
sourav parmarcd5fb182020-07-17 12:58:44 -07001846 const auto *ray_query_features = lvl_find_in_chain<VkPhysicalDeviceRayQueryFeaturesKHR>(pCreateInfo->pNext);
1847 if (ray_query_features) {
1848 state_tracker->enabled_features.ray_query_features = *ray_query_features;
1849 }
1850
1851 const auto *ray_tracing_pipeline_features =
1852 lvl_find_in_chain<VkPhysicalDeviceRayTracingPipelineFeaturesKHR>(pCreateInfo->pNext);
1853 if (ray_tracing_pipeline_features) {
1854 state_tracker->enabled_features.ray_tracing_pipeline_features = *ray_tracing_pipeline_features;
1855 }
1856
1857 const auto *ray_tracing_acceleration_structure_features =
1858 lvl_find_in_chain<VkPhysicalDeviceAccelerationStructureFeaturesKHR>(pCreateInfo->pNext);
1859 if (ray_tracing_acceleration_structure_features) {
1860 state_tracker->enabled_features.ray_tracing_acceleration_structure_features = *ray_tracing_acceleration_structure_features;
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001861 }
1862
Jeff Bolz165818a2020-05-08 11:19:03 -05001863 const auto *robustness2_features = lvl_find_in_chain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
1864 if (robustness2_features) {
1865 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1866 }
1867
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001868 const auto *fragment_density_map_features =
1869 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
1870 if (fragment_density_map_features) {
1871 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1872 }
1873
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001874 const auto *fragment_density_map_features2 =
1875 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
1876 if (fragment_density_map_features2) {
1877 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1878 }
1879
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001880 const auto *astc_decode_features = lvl_find_in_chain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
1881 if (astc_decode_features) {
1882 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1883 }
1884
Tony-LunarG7337b312020-04-15 16:40:25 -06001885 const auto *custom_border_color_features = lvl_find_in_chain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
1886 if (custom_border_color_features) {
1887 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1888 }
1889
sfricke-samsungfd661d62020-05-16 00:57:27 -07001890 const auto *pipeline_creation_cache_control_features =
1891 lvl_find_in_chain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
1892 if (pipeline_creation_cache_control_features) {
1893 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1894 }
1895
Tobias Hector6663c9b2020-11-05 10:18:02 +00001896 const auto *fragment_shading_rate_features =
1897 lvl_find_in_chain<VkPhysicalDeviceFragmentShadingRateFeaturesKHR>(pCreateInfo->pNext);
1898 if (fragment_shading_rate_features) {
1899 state_tracker->enabled_features.fragment_shading_rate_features = *fragment_shading_rate_features;
1900 }
1901
Piers Daniell39842ee2020-07-10 16:42:33 -06001902 const auto *extended_dynamic_state_features =
1903 lvl_find_in_chain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1904 if (extended_dynamic_state_features) {
1905 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1906 }
1907
locke-lunarg3fa463a2020-10-23 16:39:04 -06001908 const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
1909 if (multiview_features) {
1910 state_tracker->enabled_features.multiview_features = *multiview_features;
1911 }
1912
Nathaniel Cesariob3f2d702020-11-09 09:20:49 -07001913 const auto *portability_features = lvl_find_in_chain<VkPhysicalDevicePortabilitySubsetFeaturesKHR>(pCreateInfo->pNext);
1914 if (portability_features) {
1915 state_tracker->enabled_features.portability_subset_features = *portability_features;
1916 }
1917
sfricke-samsung0065ce02020-12-03 22:46:37 -08001918 const auto *shader_integer_functions2_features =
1919 lvl_find_in_chain<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>(pCreateInfo->pNext);
1920 if (shader_integer_functions2_features) {
1921 state_tracker->enabled_features.shader_integer_functions2_features = *shader_integer_functions2_features;
1922 }
1923
1924 const auto *shader_sm_builtins_feature = lvl_find_in_chain<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV>(pCreateInfo->pNext);
1925 if (shader_sm_builtins_feature) {
1926 state_tracker->enabled_features.shader_sm_builtins_feature = *shader_sm_builtins_feature;
1927 }
1928
1929 const auto *shader_atomic_float_feature = lvl_find_in_chain<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT>(pCreateInfo->pNext);
1930 if (shader_atomic_float_feature) {
1931 state_tracker->enabled_features.shader_atomic_float_feature = *shader_atomic_float_feature;
1932 }
1933
1934 const auto *shader_image_atomic_int64_feature =
1935 lvl_find_in_chain<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT>(pCreateInfo->pNext);
1936 if (shader_image_atomic_int64_feature) {
1937 state_tracker->enabled_features.shader_image_atomic_int64_feature = *shader_image_atomic_int64_feature;
1938 }
1939
locke-lunargd556cc32019-09-17 01:21:23 -06001940 // Store physical device properties and physical device mem limits into CoreChecks structs
1941 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1942 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001943 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1944 &state_tracker->phys_dev_props_core11);
1945 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1946 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001947
1948 const auto &dev_ext = state_tracker->device_extensions;
1949 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1950
1951 if (dev_ext.vk_khr_push_descriptor) {
1952 // Get the needed push_descriptor limits
1953 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1954 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1955 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1956 }
1957
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001958 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001959 VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing_prop;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001960 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1961 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1962 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1963 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1964 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1965 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1966 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1967 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1968 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1969 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1970 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1971 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1972 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1973 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1974 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1975 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1976 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1977 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1978 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1979 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1980 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1981 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1982 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1983 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1984 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1985 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1986 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1987 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1988 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1989 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1990 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1991 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1992 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1993 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1994 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1995 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1996 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1997 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1998 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1999 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
2000 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
2001 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
2002 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
2003 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
2004 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
2005 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
2006 }
2007
locke-lunargd556cc32019-09-17 01:21:23 -06002008 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
2009 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
2010 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
2011 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002012
2013 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002014 VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002015 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
2016 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
2017 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
2018 depth_stencil_resolve_props.supportedStencilResolveModes;
2019 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
2020 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
2021 }
2022
locke-lunargd556cc32019-09-17 01:21:23 -06002023 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002024 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
sourav parmarcd5fb182020-07-17 12:58:44 -07002025 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing_pipeline, &phys_dev_props->ray_tracing_propsKHR);
2026 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_acceleration_structure, &phys_dev_props->acc_structure_props);
locke-lunargd556cc32019-09-17 01:21:23 -06002027 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
2028 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02002029 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002030 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07002031 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06002032 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
locke-lunarg3fa463a2020-10-23 16:39:04 -06002033 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_multiview, &phys_dev_props->multiview_props);
Nathaniel Cesario3291c912020-11-17 16:54:41 -07002034 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_portability_subset, &phys_dev_props->portability_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002035
2036 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002037 VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002038 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
2039 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
2040 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
2041 }
2042
2043 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002044 VkPhysicalDeviceFloatControlsProperties float_controls_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002045 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
2046 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
2047 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
2048 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
2049 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
2050 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
2051 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
2052 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
2053 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
2054 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
2055 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
2056 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
2057 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
2058 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
2059 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
2060 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
2061 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
2062 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
2063 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
2064 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
2065 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
2066 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07002067
locke-lunargd556cc32019-09-17 01:21:23 -06002068 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
2069 // Get the needed cooperative_matrix properties
2070 auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
Mike Schuchardt2df08912020-12-15 16:28:09 -08002071 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2>(&cooperative_matrix_props);
locke-lunargd556cc32019-09-17 01:21:23 -06002072 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
2073 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
2074
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002075 uint32_t num_cooperative_matrix_properties = 0;
2076 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties, NULL);
2077 state_tracker->cooperative_matrix_properties.resize(num_cooperative_matrix_properties,
locke-lunargd556cc32019-09-17 01:21:23 -06002078 lvl_init_struct<VkCooperativeMatrixPropertiesNV>());
2079
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002080 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties,
locke-lunargd556cc32019-09-17 01:21:23 -06002081 state_tracker->cooperative_matrix_properties.data());
2082 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002083 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06002084 // Get the needed subgroup limits
2085 auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
Mike Schuchardt2df08912020-12-15 16:28:09 -08002086 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2>(&subgroup_prop);
locke-lunargd556cc32019-09-17 01:21:23 -06002087 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
2088
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002089 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
2090 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
2091 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
2092 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06002093 }
2094
Tobias Hector6663c9b2020-11-05 10:18:02 +00002095 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_fragment_shading_rate, &phys_dev_props->fragment_shading_rate_props);
2096
locke-lunargd556cc32019-09-17 01:21:23 -06002097 // Store queue family data
2098 if (pCreateInfo->pQueueCreateInfos != nullptr) {
2099 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07002100 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06002101 state_tracker->queue_family_index_map.insert(
sfricke-samsung590ae1e2020-04-25 01:18:05 -07002102 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.queueCount));
2103 state_tracker->queue_family_create_flags_map.insert(
2104 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.flags));
locke-lunargd556cc32019-09-17 01:21:23 -06002105 }
2106 }
2107}
2108
// Pre-record hook for vkDestroyDevice: tears down all device-scoped tracked state.
// NOTE: the teardown order below is deliberate — command buffers must be reset while
// the objects they reference are still alive, and descriptor pools must be deleted
// before asserting that setMap is empty.
void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &command_buffer : commandBufferMap) {
        ResetCommandBufferState(command_buffer.first);
    }
    // Drop pipeline/render-pass state before the command-buffer map itself is cleared.
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}
2132
2133// Loop through bound objects and increment their in_use counts.
2134void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
2135 for (auto obj : cb_node->object_bindings) {
2136 auto base_obj = GetStateStructPtrFromObject(obj);
2137 if (base_obj) {
2138 base_obj->in_use.fetch_add(1);
2139 }
2140 }
2141}
2142
2143// Track which resources are in-flight by atomically incrementing their "in_use" count
2144void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
2145 cb_node->submitCount++;
2146 cb_node->in_use.fetch_add(1);
2147
2148 // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
2149 IncrementBoundObjects(cb_node);
2150 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
2151 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
2152 // should then be flagged prior to calling this function
2153 for (auto event : cb_node->writeEventsBeforeWait) {
2154 auto event_state = GetEventState(event);
2155 if (event_state) event_state->write_in_use++;
2156 }
2157}
2158
2159// Decrement in-use count for objects bound to command buffer
2160void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
2161 BASE_NODE *base_obj = nullptr;
2162 for (auto obj : cb_node->object_bindings) {
2163 base_obj = GetStateStructPtrFromObject(obj);
2164 if (base_obj) {
2165 base_obj->in_use.fetch_sub(1);
2166 }
2167 }
2168}
2169
// Retire all work on pQueue up to (but not including) sequence number `seq`:
// release in_use references held by each retired submission's semaphores and
// command buffers, publish the submissions' deferred query state, mark their
// fences retired, then transitively retire work on other queues / timeline
// semaphores that the retired submissions were known to have waited on.
void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    // Highest retired wait observed per other queue / per timeline semaphore.
    // These are collected during the loop and processed only at the end, so the
    // recursive retirement happens after this queue's state is consistent.
    std::unordered_map<VkQueue, uint64_t> other_queue_seqs;
    std::unordered_map<VkSemaphore, uint64_t> timeline_semaphore_counters;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        // Release the waits this submission performed.
        for (auto &wait : submission.waitSemaphores) {
            auto semaphore_state = GetSemaphoreState(wait.semaphore);
            if (semaphore_state) {
                semaphore_state->in_use.fetch_sub(1);
            }
            if (wait.type == VK_SEMAPHORE_TYPE_TIMELINE) {
                // Remember the largest payload value this semaphore was waited on for.
                auto &last_counter = timeline_semaphore_counters[wait.semaphore];
                last_counter = std::max(last_counter, wait.payload);
            } else {
                // Binary wait: the signaling queue must have reached wait.seq.
                auto &last_seq = other_queue_seqs[wait.queue];
                last_seq = std::max(last_seq, wait.seq);
            }
        }

        // Release signals and advance timeline payloads that this submission produced.
        for (auto &signal : submission.signalSemaphores) {
            auto semaphore_state = GetSemaphoreState(signal.semaphore);
            if (semaphore_state) {
                semaphore_state->in_use.fetch_sub(1);
                // Timeline payloads are monotonic; only move forward.
                if (semaphore_state->type == VK_SEMAPHORE_TYPE_TIMELINE && semaphore_state->payload < signal.payload) {
                    semaphore_state->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto semaphore_state = GetSemaphoreState(semaphore);
            if (semaphore_state) {
                semaphore_state->in_use.fetch_sub(1);
            }
        }

        // Undo the per-command-buffer in-flight accounting done at submit time.
        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto event_node = eventMap.find(event);
                if (event_node != eventMap.end()) {
                    event_node->second.write_in_use--;
                }
            }
            // Replay the deferred query updates recorded in this CB (validation disabled),
            // then publish queries that ended as now-available.
            QueryMap local_query_to_state_map;
            VkQueryPool first_pool = VK_NULL_HANDLE;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
            }

            for (auto query_state_pair : local_query_to_state_map) {
                if (query_state_pair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[query_state_pair.first] = QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        // The fence attached to this submission (if internally scoped) is now signaled.
        auto fence_state = GetFenceState(submission.fence);
        if (fence_state && fence_state->scope == kSyncScopeInternal) {
            fence_state->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : other_queue_seqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
    for (auto sc : timeline_semaphore_counters) {
        RetireTimelineSemaphore(sc.first, sc.second);
    }
}
2253
2254// Submit a fence to a queue, delimiting previous fences and previous untracked
2255// work by it.
2256static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
2257 pFence->state = FENCE_INFLIGHT;
2258 pFence->signaler.first = pQueue->queue;
2259 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
2260}
2261
// Record a successful vkQueueSubmit: mark the fence, wait/signal semaphores and
// command buffers as in-flight, append one tracked submission per VkSubmitInfo
// to the queue, and publish each CB's deferred query/event state. Work signaled
// to externally-scoped objects is retired eagerly since its wait is invisible.
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto queue_state = GetQueueState(queue);
    auto fence_state = GetFenceState(fence);

    if (fence_state) {
        if (fence_state->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(queue_state, fence_state, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                queue_state->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                     std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = queue_state->seq + queue_state->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        // Sequence number this submission will occupy on the queue.
        const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfo>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto semaphore_state = GetSemaphoreState(semaphore);
            if (semaphore_state) {
                if (semaphore_state->scope == kSyncScopeInternal) {
                    SEMAPHORE_WAIT wait;
                    wait.semaphore = semaphore;
                    wait.type = semaphore_state->type;
                    if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
                        // A binary wait consumes the pending signal; record the signaling
                        // queue/seq (if any) and reset the semaphore to unsignaled.
                        if (semaphore_state->signaler.first != VK_NULL_HANDLE) {
                            wait.queue = semaphore_state->signaler.first;
                            wait.seq = semaphore_state->signaler.second;
                            semaphore_waits.push_back(wait);
                            semaphore_state->in_use.fetch_add(1);
                        }
                        semaphore_state->signaler.first = VK_NULL_HANDLE;
                        semaphore_state->signaled = false;
                    } else if (semaphore_state->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
                        // Timeline wait only blocks when the requested value hasn't been reached yet.
                        wait.queue = queue;
                        wait.seq = next_seq;
                        wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
                        semaphore_waits.push_back(wait);
                        semaphore_state->in_use.fetch_add(1);
                    }
                } else {
                    // Externally-scoped semaphore; a temporary import reverts to internal after one wait.
                    semaphore_externals.push_back(semaphore);
                    semaphore_state->in_use.fetch_add(1);
                    if (semaphore_state->scope == kSyncScopeExternalTemporary) {
                        semaphore_state->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto semaphore_state = GetSemaphoreState(semaphore);
            if (semaphore_state) {
                if (semaphore_state->scope == kSyncScopeInternal) {
                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = next_seq;
                    if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
                        semaphore_state->signaler.first = queue;
                        semaphore_state->signaler.second = next_seq;
                        semaphore_state->signaled = true;
                    } else {
                        signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    semaphore_state->in_use.fetch_add(1);
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, next_seq);
                }
            }
        }
        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                // Secondaries executed from this primary are in-flight too.
                cbs.push_back(submit->pCommandBuffers[i]);
                for (auto secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondary_cmd_buffer->commandBuffer);
                    IncrementResources(secondary_cmd_buffer);
                }
                IncrementResources(cb_node);

                // Replay deferred query/event updates recorded in the CB (validation
                // disabled) and fold the results into the device-level maps.
                VkQueryPool first_pool = VK_NULL_HANDLE;
                EventToStageMap local_event_to_stage_map;
                QueryMap local_query_to_state_map;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, first_pool, perf_pass, &local_query_to_state_map);
                }

                for (auto query_state_pair : local_query_to_state_map) {
                    queryToStateMap[query_state_pair.first] = query_state_pair.second;
                }

                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &local_event_to_stage_map);
                }

                for (auto event_stage_pair : local_event_to_stage_map) {
                    eventMap[event_stage_pair.first].stageMask = event_stage_pair.second;
                }
            }
        }

        // Only the last submission in the batch carries the fence.
        queue_state->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                              submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, perf_pass);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(queue_state, early_retire_seq);
    }
}
2393
2394void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
2395 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
2396 VkResult result) {
2397 if (VK_SUCCESS == result) {
2398 AddMemObjInfo(device, *pMemory, pAllocateInfo);
2399 }
2400 return;
2401}
2402
// Tear down tracking for memory about to be freed: unbind every image/buffer/
// acceleration structure bound to it (including sparse bindings), invalidate
// dependent command buffers, and release its fake base address.
void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureStateNV(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            // Erase-while-iterating: capture the successor before a possible erase(it).
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            // Refresh the resource's cached view of what memory it is bound to.
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    fake_memory.Free(mem_info->fake_base_address);
    memObjMap.erase(mem);
}
2450
// Record a successful vkQueueBindSparse: register the new sparse memory
// bindings for buffers/images, track the wait/signal semaphores, and enqueue
// one tracked submission (with no command buffers) per VkBindSparseInfo.
void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto fence_state = GetFenceState(fence);
    auto queue_state = GetQueueState(queue);

    if (fence_state) {
        if (fence_state->scope == kSyncScopeInternal) {
            SubmitFence(queue_state, fence_state, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                queue_state->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                     std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = queue_state->seq + queue_state->submissions.size();
        }
    }

    for (uint32_t bind_idx = 0; bind_idx < bindInfoCount; ++bind_idx) {
        const VkBindSparseInfo &bind_info = pBindInfo[bind_idx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bind_info.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bind_info.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bind_info.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bind_info.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bind_info.imageBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bind_info.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bind_info.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bind_info.pWaitSemaphores[i];
            auto semaphore_state = GetSemaphoreState(semaphore);
            if (semaphore_state) {
                if (semaphore_state->scope == kSyncScopeInternal) {
                    // Consume the pending binary signal (if any) and reset to unsignaled.
                    if (semaphore_state->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back(
                            {semaphore, semaphore_state->type, semaphore_state->signaler.first, semaphore_state->signaler.second});
                        semaphore_state->in_use.fetch_add(1);
                    }
                    semaphore_state->signaler.first = VK_NULL_HANDLE;
                    semaphore_state->signaled = false;
                } else {
                    // Externally-scoped semaphore; a temporary import reverts to internal after one wait.
                    semaphore_externals.push_back(semaphore);
                    semaphore_state->in_use.fetch_add(1);
                    if (semaphore_state->scope == kSyncScopeExternalTemporary) {
                        semaphore_state->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bind_info.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bind_info.pSignalSemaphores[i];
            auto semaphore_state = GetSemaphoreState(semaphore);
            if (semaphore_state) {
                if (semaphore_state->scope == kSyncScopeInternal) {
                    // This bind operation (at the next queue seq) becomes the signaler.
                    semaphore_state->signaler.first = queue;
                    semaphore_state->signaler.second = queue_state->seq + queue_state->submissions.size() + 1;
                    semaphore_state->signaled = true;
                    semaphore_state->in_use.fetch_add(1);

                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = semaphore_state->signaler.second;
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, queue_state->seq + queue_state->submissions.size() + 1);
                }
            }
        }

        // Sparse binds carry no command buffers; the fence rides on the last bind only.
        queue_state->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals,
                                              semaphore_externals, bind_idx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE,
                                              0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(queue_state, early_retire_seq);
    }
}
2553
2554void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
2555 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
2556 VkResult result) {
2557 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002558 auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002559 semaphore_state->signaler.first = VK_NULL_HANDLE;
2560 semaphore_state->signaler.second = 0;
2561 semaphore_state->signaled = false;
2562 semaphore_state->scope = kSyncScopeInternal;
Mike Schuchardt2df08912020-12-15 16:28:09 -08002563 semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002564 semaphore_state->payload = 0;
Mike Schuchardt2df08912020-12-15 16:28:09 -08002565 auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfo>(pCreateInfo->pNext);
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002566 if (semaphore_type_create_info) {
2567 semaphore_state->type = semaphore_type_create_info->semaphoreType;
2568 semaphore_state->payload = semaphore_type_create_info->initialValue;
2569 }
locke-lunargd556cc32019-09-17 01:21:23 -06002570 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
2571}
2572
Mike Schuchardt2df08912020-12-15 16:28:09 -08002573void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBits handle_type,
2574 VkSemaphoreImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06002575 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
2576 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002577 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06002578 sema_node->scope == kSyncScopeInternal) {
2579 sema_node->scope = kSyncScopeExternalTemporary;
2580 } else {
2581 sema_node->scope = kSyncScopeExternalPermanent;
2582 }
2583 }
2584}
2585
Mike Schuchardt2df08912020-12-15 16:28:09 -08002586void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo,
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002587 VkResult result) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002588 auto *semaphore_state = GetSemaphoreState(pSignalInfo->semaphore);
2589 semaphore_state->payload = pSignalInfo->value;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002590}
2591
locke-lunargd556cc32019-09-17 01:21:23 -06002592void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
2593 auto mem_info = GetDevMemState(mem);
2594 if (mem_info) {
2595 mem_info->mapped_range.offset = offset;
2596 mem_info->mapped_range.size = size;
2597 mem_info->p_driver_data = *ppData;
2598 }
2599}
2600
2601void ValidationStateTracker::RetireFence(VkFence fence) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002602 auto fence_state = GetFenceState(fence);
2603 if (fence_state && fence_state->scope == kSyncScopeInternal) {
2604 if (fence_state->signaler.first != VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06002605 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002606 RetireWorkOnQueue(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002607 } else {
2608 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
2609 // the fence as retired.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002610 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06002611 }
2612 }
2613}
2614
2615void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2616 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2617 if (VK_SUCCESS != result) return;
2618
2619 // When we know that all fences are complete we can clean/remove their CBs
2620 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2621 for (uint32_t i = 0; i < fenceCount; i++) {
2622 RetireFence(pFences[i]);
2623 }
2624 }
2625 // NOTE : Alternate case not handled here is when some fences have completed. In
2626 // this case for app to guarantee which fences completed it will have to call
2627 // vkGetFenceStatus() at which point we'll clean/remove their CBs if complete.
2628}
2629
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002630void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002631 auto semaphore_state = GetSemaphoreState(semaphore);
2632 if (semaphore_state) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002633 for (auto &pair : queueMap) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002634 QUEUE_STATE &queue_state = pair.second;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002635 uint64_t max_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002636 for (const auto &submission : queue_state.submissions) {
2637 for (const auto &signal_semaphore : submission.signalSemaphores) {
2638 if (signal_semaphore.semaphore == semaphore && signal_semaphore.payload <= until_payload) {
2639 if (signal_semaphore.seq > max_seq) {
2640 max_seq = signal_semaphore.seq;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002641 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002642 }
2643 }
2644 }
Tony-LunarG47d5e272020-04-07 15:35:55 -06002645 if (max_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002646 RetireWorkOnQueue(&queue_state, max_seq);
Tony-LunarG47d5e272020-04-07 15:35:55 -06002647 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002648 }
2649 }
2650}
2651
John Zulauff89de662020-04-13 18:57:34 -06002652void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2653 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002654 if (VK_SUCCESS != result) return;
2655
2656 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2657 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2658 }
2659}
2660
John Zulauff89de662020-04-13 18:57:34 -06002661void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2662 VkResult result) {
2663 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2664}
2665
2666void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
2667 uint64_t timeout, VkResult result) {
2668 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2669}
2670
Adrian Coca Lorentec7d76102020-09-28 13:58:16 +02002671void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2672 VkResult result) {
2673 if (VK_SUCCESS != result) return;
2674
2675 RetireTimelineSemaphore(semaphore, *pValue);
2676}
2677
2678void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2679 VkResult result) {
2680 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2681}
2682void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2683 VkResult result) {
2684 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2685}
2686
locke-lunargd556cc32019-09-17 01:21:23 -06002687void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2688 if (VK_SUCCESS != result) return;
2689 RetireFence(fence);
2690}
2691
2692void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
2693 // Add queue to tracking set only if it is new
2694 auto queue_is_new = queues.emplace(queue);
2695 if (queue_is_new.second == true) {
2696 QUEUE_STATE *queue_state = &queueMap[queue];
2697 queue_state->queue = queue;
2698 queue_state->queueFamilyIndex = queue_family_index;
2699 queue_state->seq = 0;
2700 }
2701}
2702
2703void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
2704 VkQueue *pQueue) {
2705 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
2706}
2707
2708void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
2709 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
2710}
2711
2712void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2713 if (VK_SUCCESS != result) return;
2714 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002715 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002716}
2717
2718void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2719 if (VK_SUCCESS != result) return;
2720 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002721 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002722 }
2723}
2724
2725void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2726 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002727 auto fence_state = GetFenceState(fence);
2728 fence_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002729 fenceMap.erase(fence);
2730}
2731
2732void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2733 const VkAllocationCallbacks *pAllocator) {
2734 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002735 auto semaphore_state = GetSemaphoreState(semaphore);
2736 semaphore_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002737 semaphoreMap.erase(semaphore);
2738}
2739
2740void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2741 if (!event) return;
2742 EVENT_STATE *event_state = GetEventState(event);
2743 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
2744 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
2745 eventMap.erase(event);
2746}
2747
2748void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2749 const VkAllocationCallbacks *pAllocator) {
2750 if (!queryPool) return;
2751 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
2752 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
2753 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002754 qp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002755 queryPoolMap.erase(queryPool);
2756}
2757
2758// Object with given handle is being bound to memory w/ given mem_info struct.
2759// Track the newly bound memory range with given memoryOffset
2760// Also scan any previous ranges, track aliased ranges with new range, and flag an error if a linear
2761// and non-linear range incorrectly overlap.
locke-lunargd556cc32019-09-17 01:21:23 -06002762void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002763 VkDeviceSize memoryOffset) {
locke-lunargd556cc32019-09-17 01:21:23 -06002764 if (typed_handle.type == kVulkanObjectTypeImage) {
2765 mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
2766 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002767 mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002768 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002769 mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002770 } else {
2771 // Unsupported object type
2772 assert(false);
2773 }
2774}
2775
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002776void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
2777 InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002778}
2779
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002780void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
2781 InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002782}
2783
2784void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002785 VkDeviceSize mem_offset) {
2786 InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002787}
2788
2789// This function will remove the handle-to-index mapping from the appropriate map.
2790static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
2791 if (typed_handle.type == kVulkanObjectTypeImage) {
2792 mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
2793 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002794 mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002795 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002796 mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002797 } else {
2798 // Unsupported object type
2799 assert(false);
2800 }
2801}
2802
2803void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
2804 RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
2805}
2806
2807void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
2808 RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
2809}
2810
locke-lunargd556cc32019-09-17 01:21:23 -06002811void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2812 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2813 if (buffer_state) {
2814 // Track bound memory range information
2815 auto mem_info = GetDevMemState(mem);
2816 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002817 InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06002818 }
2819 // Track objects tied to memory
2820 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
2821 }
2822}
2823
2824void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
2825 VkDeviceSize memoryOffset, VkResult result) {
2826 if (VK_SUCCESS != result) return;
2827 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
2828}
2829
2830void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002831 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002832 for (uint32_t i = 0; i < bindInfoCount; i++) {
2833 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2834 }
2835}
2836
2837void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002838 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002839 for (uint32_t i = 0; i < bindInfoCount; i++) {
2840 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2841 }
2842}
2843
Spencer Fricke6c127102020-04-16 06:25:20 -07002844void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002845 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2846 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002847 buffer_state->memory_requirements_checked = true;
2848 }
2849}
2850
2851void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
2852 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002853 RecordGetBufferMemoryRequirementsState(buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002854}
2855
2856void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002857 const VkBufferMemoryRequirementsInfo2 *pInfo,
2858 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002859 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002860}
2861
2862void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002863 const VkBufferMemoryRequirementsInfo2 *pInfo,
2864 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002865 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002866}
2867
Spencer Fricke6c127102020-04-16 06:25:20 -07002868void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002869 const VkImagePlaneMemoryRequirementsInfo *plane_info =
2870 (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002871 IMAGE_STATE *image_state = GetImageState(image);
2872 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002873 if (plane_info != nullptr) {
2874 // Multi-plane image
2875 image_state->memory_requirements_checked = false; // Each image plane needs to be checked itself
2876 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
2877 image_state->plane0_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002878 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
2879 image_state->plane1_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002880 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
2881 image_state->plane2_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002882 }
2883 } else {
2884 // Single Plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002885 image_state->memory_requirements_checked = true;
2886 }
locke-lunargd556cc32019-09-17 01:21:23 -06002887 }
2888}
2889
2890void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
2891 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002892 RecordGetImageMemoryRequirementsState(image, nullptr);
locke-lunargd556cc32019-09-17 01:21:23 -06002893}
2894
2895void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2896 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002897 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002898}
2899
2900void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
2901 const VkImageMemoryRequirementsInfo2 *pInfo,
2902 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002903 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002904}
2905
2906static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2907 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2908 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2909 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2910 image_state->sparse_metadata_required = true;
2911 }
2912}
2913
2914void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2915 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2916 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2917 auto image_state = GetImageState(image);
2918 image_state->get_sparse_reqs_called = true;
2919 if (!pSparseMemoryRequirements) return;
2920 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2921 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2922 }
2923}
2924
2925void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002926 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2927 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002928 auto image_state = GetImageState(pInfo->image);
2929 image_state->get_sparse_reqs_called = true;
2930 if (!pSparseMemoryRequirements) return;
2931 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2932 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2933 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2934 }
2935}
2936
2937void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002938 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2939 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002940 auto image_state = GetImageState(pInfo->image);
2941 image_state->get_sparse_reqs_called = true;
2942 if (!pSparseMemoryRequirements) return;
2943 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2944 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2945 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2946 }
2947}
2948
2949void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2950 const VkAllocationCallbacks *pAllocator) {
2951 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002952 auto shader_module_state = GetShaderModuleState(shaderModule);
2953 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002954 shaderModuleMap.erase(shaderModule);
2955}
2956
2957void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2958 const VkAllocationCallbacks *pAllocator) {
2959 if (!pipeline) return;
2960 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
2961 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
2962 // Any bound cmd buffers are now invalid
2963 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002964 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002965 pipelineMap.erase(pipeline);
2966}
2967
2968void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2969 const VkAllocationCallbacks *pAllocator) {
2970 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002971 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
2972 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002973 pipelineLayoutMap.erase(pipelineLayout);
2974}
2975
2976void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2977 const VkAllocationCallbacks *pAllocator) {
2978 if (!sampler) return;
2979 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
2980 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
2981 // Any bound cmd buffers are now invalid
2982 if (sampler_state) {
2983 InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
Yuly Novikov424cdd52020-05-26 16:45:12 -04002984
2985 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2986 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
2987 custom_border_color_sampler_count--;
2988 }
2989
2990 sampler_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002991 }
2992 samplerMap.erase(sampler);
2993}
2994
2995void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2996 const VkAllocationCallbacks *pAllocator) {
2997 if (!descriptorSetLayout) return;
2998 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2999 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05003000 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003001 descriptorSetLayoutMap.erase(layout_it);
3002 }
3003}
3004
3005void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3006 const VkAllocationCallbacks *pAllocator) {
3007 if (!descriptorPool) return;
3008 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
3009 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
3010 if (desc_pool_state) {
3011 // Any bound cmd buffers are now invalid
3012 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
3013 // Free sets that were in this pool
3014 for (auto ds : desc_pool_state->sets) {
3015 FreeDescriptorSet(ds);
3016 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003017 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003018 descriptorPoolMap.erase(descriptorPool);
3019 }
3020}
3021
// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState.
// `pool_state` must be the pool the command buffers were allocated from — each handle is erased from
// its commandBuffers set as part of teardown.
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        // Allow any derived class to clean up command buffer state
        if (command_buffer_free_callback) {
            (*command_buffer_free_callback)(command_buffers[i]);
        }

        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map; the destroyed flag lets any surviving shared_ptr
            // holders see that the underlying handle is gone
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}
3047
3048void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
3049 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003050 auto pool = GetCommandPoolState(commandPool);
3051 FreeCommandBufferStates(pool, commandBufferCount, pCommandBuffers);
locke-lunargd556cc32019-09-17 01:21:23 -06003052}
3053
3054void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
3055 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
3056 VkResult result) {
3057 if (VK_SUCCESS != result) return;
sfricke-samsungc1543372020-08-18 22:37:27 -07003058 VkCommandPool command_pool = *pCommandPool;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003059 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
sfricke-samsungc1543372020-08-18 22:37:27 -07003060 cmd_pool_state->commandPool = command_pool;
locke-lunargd556cc32019-09-17 01:21:23 -06003061 cmd_pool_state->createFlags = pCreateInfo->flags;
3062 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07003063 cmd_pool_state->unprotected = ((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) == 0);
sfricke-samsungc1543372020-08-18 22:37:27 -07003064 commandPoolMap[command_pool] = std::move(cmd_pool_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003065}
3066
// vkCreateQueryPool hook: on success, build tracking state for the pool and initialize
// every query slot to QUERYSTATE_UNKNOWN. Performance-query pools additionally record
// counter scopes and the number of passes required to read their counters.
void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
    query_pool_state->createInfo = *pCreateInfo;
    query_pool_state->pool = *pQueryPool;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        // NOTE(review): `perf` is dereferenced without a null check — presumably parameter
        // validation guarantees the VkQueryPoolPerformanceCreateInfoKHR struct is chained
        // for this query type; confirm before relying on it.
        const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
        query_pool_state->perf_counter_index_count = perf->counterIndexCount;

        // Record which scopes (command-buffer / render-pass) the selected counters require
        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
            const auto &counter = counters.counters[perf->pCounterIndices[i]];
            switch (counter.scope) {
                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
                    query_pool_state->has_perf_scope_command_buffer = true;
                    break;
                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
                    query_pool_state->has_perf_scope_render_pass = true;
                    break;
                default:
                    break;
            }
        }

        // Ask the driver how many submission passes are needed to collect all selected counters
        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
                                                                      &query_pool_state->n_performance_passes);
    }

    queryPoolMap[*pQueryPool] = std::move(query_pool_state);

    // Seed per-query state: every slot starts out in the "unknown" state until reset/begun
    QueryObject query_obj{*pQueryPool, 0u};
    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
        query_obj.query = i;
        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
    }
}
3105
3106void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
3107 const VkAllocationCallbacks *pAllocator) {
3108 if (!commandPool) return;
3109 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
3110 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
3111 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
3112 if (cp_state) {
3113 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
3114 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
3115 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003116 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003117 commandPoolMap.erase(commandPool);
3118 }
3119}
3120
3121void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
3122 VkCommandPoolResetFlags flags, VkResult result) {
3123 if (VK_SUCCESS != result) return;
3124 // Reset all of the CBs allocated from this pool
3125 auto command_pool_state = GetCommandPoolState(commandPool);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003126 for (auto cmd_buffer : command_pool_state->commandBuffers) {
3127 ResetCommandBufferState(cmd_buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06003128 }
3129}
3130
3131void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
3132 VkResult result) {
3133 for (uint32_t i = 0; i < fenceCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003134 auto fence_state = GetFenceState(pFences[i]);
3135 if (fence_state) {
3136 if (fence_state->scope == kSyncScopeInternal) {
3137 fence_state->state = FENCE_UNSIGNALED;
3138 } else if (fence_state->scope == kSyncScopeExternalTemporary) {
3139 fence_state->scope = kSyncScopeInternal;
locke-lunargd556cc32019-09-17 01:21:23 -06003140 }
3141 }
3142 }
3143}
3144
// For given cb_nodes, invalidate them and track object causing invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
// can also unlink objects from command buffers.
void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        // Demote the CB to the "invalid" state matching whether recording had finished
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // if secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            // The mapped int is obj's slot index in this CB's object_bindings; the slot is
            // cleared (not erased) so other recorded indices remain valid.
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        cb_nodes.clear();
    }
}
3174
3175void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
3176 const VulkanTypedHandle &obj) {
locke-lunargd556cc32019-09-17 01:21:23 -06003177 for (auto cb_node : cb_nodes) {
3178 if (cb_node->state == CB_RECORDING) {
3179 cb_node->state = CB_INVALID_INCOMPLETE;
3180 } else if (cb_node->state == CB_RECORDED) {
3181 cb_node->state = CB_INVALID_COMPLETE;
3182 }
3183 cb_node->broken_bindings.push_back(obj);
3184
3185 // if secondary, then propagate the invalidation to the primaries that will call us.
3186 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003187 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06003188 }
3189 }
3190}
3191
3192void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
3193 const VkAllocationCallbacks *pAllocator) {
3194 if (!framebuffer) return;
3195 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
3196 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
3197 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003198 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003199 frameBufferMap.erase(framebuffer);
3200}
3201
3202void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
3203 const VkAllocationCallbacks *pAllocator) {
3204 if (!renderPass) return;
3205 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
3206 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
3207 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003208 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003209 renderPassMap.erase(renderPass);
3210}
3211
3212void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
3213 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
3214 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003215 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003216 fence_state->fence = *pFence;
3217 fence_state->createInfo = *pCreateInfo;
3218 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
3219 fenceMap[*pFence] = std::move(fence_state);
3220}
3221
3222bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3223 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3224 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003225 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003226 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
3227 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
3228 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
3229 cgpl_state->pipe_state.reserve(count);
3230 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003231 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05003232 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003233 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003234 }
3235 return false;
3236}
3237
3238void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3239 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3240 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
3241 VkResult result, void *cgpl_state_data) {
3242 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
3243 // This API may create pipelines regardless of the return value
3244 for (uint32_t i = 0; i < count; i++) {
3245 if (pPipelines[i] != VK_NULL_HANDLE) {
3246 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3247 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
3248 }
3249 }
3250 cgpl_state->pipe_state.clear();
3251}
3252
3253bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3254 const VkComputePipelineCreateInfo *pCreateInfos,
3255 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003256 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003257 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
3258 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
3259 ccpl_state->pipe_state.reserve(count);
3260 for (uint32_t i = 0; i < count; i++) {
3261 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003262 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06003263 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003264 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003265 }
3266 return false;
3267}
3268
3269void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3270 const VkComputePipelineCreateInfo *pCreateInfos,
3271 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
3272 VkResult result, void *ccpl_state_data) {
3273 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
3274
3275 // This API may create pipelines regardless of the return value
3276 for (uint32_t i = 0; i < count; i++) {
3277 if (pPipelines[i] != VK_NULL_HANDLE) {
3278 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3279 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
3280 }
3281 }
3282 ccpl_state->pipe_state.clear();
3283}
3284
3285bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
3286 uint32_t count,
3287 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
3288 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003289 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003290 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
3291 crtpl_state->pipe_state.reserve(count);
3292 for (uint32_t i = 0; i < count; i++) {
3293 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003294 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003295 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003296 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003297 }
3298 return false;
3299}
3300
3301void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
3302 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
3303 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
3304 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
3305 // This API may create pipelines regardless of the return value
3306 for (uint32_t i = 0; i < count; i++) {
3307 if (pPipelines[i] != VK_NULL_HANDLE) {
3308 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3309 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3310 }
3311 }
3312 crtpl_state->pipe_state.clear();
3313}
3314
sourav parmarcd5fb182020-07-17 12:58:44 -07003315bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
3316 VkPipelineCache pipelineCache, uint32_t count,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003317 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3318 const VkAllocationCallbacks *pAllocator,
3319 VkPipeline *pPipelines, void *crtpl_state_data) const {
3320 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3321 crtpl_state->pipe_state.reserve(count);
3322 for (uint32_t i = 0; i < count; i++) {
3323 // Create and initialize internal tracking data structure
3324 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
3325 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
3326 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
3327 }
3328 return false;
3329}
3330
sourav parmarcd5fb182020-07-17 12:58:44 -07003331void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
3332 VkPipelineCache pipelineCache, uint32_t count,
3333 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3334 const VkAllocationCallbacks *pAllocator,
3335 VkPipeline *pPipelines, VkResult result,
3336 void *crtpl_state_data) {
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003337 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3338 // This API may create pipelines regardless of the return value
3339 for (uint32_t i = 0; i < count; i++) {
3340 if (pPipelines[i] != VK_NULL_HANDLE) {
3341 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3342 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3343 }
3344 }
3345 crtpl_state->pipe_state.clear();
3346}
3347
locke-lunargd556cc32019-09-17 01:21:23 -06003348void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
3349 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
3350 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003351 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003352 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
3353 pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
Tony-LunarG7337b312020-04-15 16:40:25 -06003354 custom_border_color_sampler_count++;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003355 }
locke-lunargd556cc32019-09-17 01:21:23 -06003356}
3357
3358void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
3359 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
3360 const VkAllocationCallbacks *pAllocator,
3361 VkDescriptorSetLayout *pSetLayout, VkResult result) {
3362 if (VK_SUCCESS != result) return;
3363 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
3364}
3365
3366// For repeatable sorting, not very useful for "memory in range" search
3367struct PushConstantRangeCompare {
3368 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
3369 if (lhs->offset == rhs->offset) {
3370 if (lhs->size == rhs->size) {
3371 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
3372 return lhs->stageFlags < rhs->stageFlags;
3373 }
3374 // If the offsets are the same then sorting by the end of range is useful for validation
3375 return lhs->size < rhs->size;
3376 }
3377 return lhs->offset < rhs->offset;
3378 }
3379};
3380
3381static PushConstantRangesDict push_constant_ranges_dict;
3382
3383PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
3384 if (!info->pPushConstantRanges) {
3385 // Hand back the empty entry (creating as needed)...
3386 return push_constant_ranges_dict.look_up(PushConstantRanges());
3387 }
3388
3389 // Sort the input ranges to ensure equivalent ranges map to the same id
3390 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
3391 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
3392 sorted.insert(info->pPushConstantRanges + i);
3393 }
3394
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07003395 PushConstantRanges ranges;
3396 ranges.reserve(sorted.size());
locke-lunargd556cc32019-09-17 01:21:23 -06003397 for (const auto range : sorted) {
3398 ranges.emplace_back(*range);
3399 }
3400 return push_constant_ranges_dict.look_up(std::move(ranges));
3401}
3402
3403// Dictionary of canoncial form of the pipeline set layout of descriptor set layouts
3404static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
3405
3406// Dictionary of canonical form of the "compatible for set" records
3407static PipelineLayoutCompatDict pipeline_layout_compat_dict;
3408
3409static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
3410 const PipelineLayoutSetLayoutsId set_layouts_id) {
3411 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
3412}
3413
3414void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
3415 const VkAllocationCallbacks *pAllocator,
3416 VkPipelineLayout *pPipelineLayout, VkResult result) {
3417 if (VK_SUCCESS != result) return;
3418
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003419 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003420 pipeline_layout_state->layout = *pPipelineLayout;
3421 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
3422 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
3423 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05003424 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003425 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
3426 }
3427
3428 // Get canonical form IDs for the "compatible for set" contents
3429 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
3430 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
3431 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
3432
3433 // Create table of "compatible for set N" cannonical forms for trivial accept validation
3434 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
3435 pipeline_layout_state->compat_for_set.emplace_back(
3436 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
3437 }
3438 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
3439}
3440
3441void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
3442 const VkAllocationCallbacks *pAllocator,
3443 VkDescriptorPool *pDescriptorPool, VkResult result) {
3444 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003445 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003446}
3447
3448void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3449 VkDescriptorPoolResetFlags flags, VkResult result) {
3450 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003451 DESCRIPTOR_POOL_STATE *pool = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06003452 // TODO: validate flags
3453 // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003454 for (auto ds : pool->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06003455 FreeDescriptorSet(ds);
3456 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003457 pool->sets.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06003458 // Reset available count for each type and available sets for this pool
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003459 for (auto it = pool->availableDescriptorTypeCount.begin(); it != pool->availableDescriptorTypeCount.end(); ++it) {
3460 pool->availableDescriptorTypeCount[it->first] = pool->maxDescriptorTypeCount[it->first];
locke-lunargd556cc32019-09-17 01:21:23 -06003461 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003462 pool->availableSets = pool->maxSets;
locke-lunargd556cc32019-09-17 01:21:23 -06003463}
3464
3465bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
3466 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003467 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003468 // Always update common data
3469 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3470 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3471 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
3472
3473 return false;
3474}
3475
3476// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
3477void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3478 VkDescriptorSet *pDescriptorSets, VkResult result,
3479 void *ads_state_data) {
3480 if (VK_SUCCESS != result) return;
3481 // All the updates are contained in a single cvdescriptorset function
3482 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3483 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3484 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
3485}
3486
3487void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
3488 const VkDescriptorSet *pDescriptorSets) {
3489 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
3490 // Update available descriptor sets in pool
3491 pool_state->availableSets += count;
3492
3493 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
3494 for (uint32_t i = 0; i < count; ++i) {
3495 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
3496 auto descriptor_set = setMap[pDescriptorSets[i]].get();
3497 uint32_t type_index = 0, descriptor_count = 0;
3498 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
3499 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
3500 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
3501 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
3502 }
3503 FreeDescriptorSet(descriptor_set);
3504 pool_state->sets.erase(descriptor_set);
3505 }
3506 }
3507}
3508
// Record state for vkUpdateDescriptorSets: forwards the write/copy arrays to the cvdescriptorset
// implementation, which applies the per-binding updates against this state tracker.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}
3516
3517void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
3518 VkCommandBuffer *pCommandBuffer, VkResult result) {
3519 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003520 auto pool = GetCommandPoolShared(pCreateInfo->commandPool);
3521 if (pool) {
locke-lunargd556cc32019-09-17 01:21:23 -06003522 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
3523 // Add command buffer to its commandPool map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003524 pool->commandBuffers.insert(pCommandBuffer[i]);
3525 auto cb_state = std::make_shared<CMD_BUFFER_STATE>();
3526 cb_state->createInfo = *pCreateInfo;
3527 cb_state->command_pool = pool;
3528 cb_state->unprotected = pool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06003529 // Add command buffer to map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003530 commandBufferMap[pCommandBuffer[i]] = std::move(cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003531 ResetCommandBufferState(pCommandBuffer[i]);
3532 }
3533 }
3534}
3535
3536// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
3537void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003538 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003539 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07003540 // If imageless fb, skip fb binding
Mike Schuchardt2df08912020-12-15 16:28:09 -08003541 if (!fb_state || fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003542 const uint32_t attachment_count = fb_state->createInfo.attachmentCount;
3543 for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
locke-lunargfc78e932020-11-19 17:06:24 -07003544 auto view_state = GetActiveAttachmentImageViewState(cb_state, attachment);
locke-lunargd556cc32019-09-17 01:21:23 -06003545 if (view_state) {
3546 AddCommandBufferBindingImageView(cb_state, view_state);
3547 }
3548 }
3549}
3550
locke-lunargfc78e932020-11-19 17:06:24 -07003551void UpdateSubpassAttachments(const safe_VkSubpassDescription2 &subpass, std::vector<SUBPASS_INFO> &subpasses) {
3552 for (uint32_t index = 0; index < subpass.inputAttachmentCount; ++index) {
3553 const uint32_t attachment_index = subpass.pInputAttachments[index].attachment;
3554 if (attachment_index != VK_ATTACHMENT_UNUSED) {
3555 subpasses[attachment_index].used = true;
3556 subpasses[attachment_index].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
3557 subpasses[attachment_index].layout = subpass.pInputAttachments[index].layout;
3558 }
3559 }
3560
3561 for (uint32_t index = 0; index < subpass.colorAttachmentCount; ++index) {
3562 const uint32_t attachment_index = subpass.pColorAttachments[index].attachment;
3563 if (attachment_index != VK_ATTACHMENT_UNUSED) {
3564 subpasses[attachment_index].used = true;
3565 subpasses[attachment_index].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
3566 subpasses[attachment_index].layout = subpass.pColorAttachments[index].layout;
3567 }
3568 if (subpass.pResolveAttachments) {
3569 const uint32_t attachment_index2 = subpass.pResolveAttachments[index].attachment;
3570 if (attachment_index2 != VK_ATTACHMENT_UNUSED) {
3571 subpasses[attachment_index2].used = true;
3572 subpasses[attachment_index2].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
3573 subpasses[attachment_index2].layout = subpass.pResolveAttachments[index].layout;
3574 }
3575 }
3576 }
3577
3578 if (subpass.pDepthStencilAttachment) {
3579 const uint32_t attachment_index = subpass.pDepthStencilAttachment->attachment;
3580 if (attachment_index != VK_ATTACHMENT_UNUSED) {
3581 subpasses[attachment_index].used = true;
3582 subpasses[attachment_index].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
3583 subpasses[attachment_index].layout = subpass.pDepthStencilAttachment->layout;
3584 }
3585 }
3586}
3587
3588void UpdateAttachmentsView(ValidationStateTracker &tracker, CMD_BUFFER_STATE &cb_state, const FRAMEBUFFER_STATE &framebuffer,
3589 const VkRenderPassBeginInfo *pRenderPassBegin) {
3590 auto &attachments = *(cb_state.active_attachments.get());
3591 const bool imageless = (framebuffer.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) ? true : false;
3592 const VkRenderPassAttachmentBeginInfo *attachment_info_struct = nullptr;
3593 if (pRenderPassBegin) attachment_info_struct = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
3594
3595 for (uint32_t i = 0; i < attachments.size(); ++i) {
3596 if (imageless) {
3597 if (attachment_info_struct && i < attachment_info_struct->attachmentCount) {
3598 auto res = cb_state.attachments_view_states.insert(
3599 tracker.GetShared<IMAGE_VIEW_STATE>(attachment_info_struct->pAttachments[i]));
3600 attachments[i] = res.first->get();
3601 }
3602 } else {
3603 auto res = cb_state.attachments_view_states.insert(framebuffer.attachments_view_state[i]);
3604 attachments[i] = res.first->get();
3605 }
3606 }
3607}
3608
// Record state for vkBeginCommandBuffer: (implicitly) reset a previously recorded buffer, mark it
// as recording, snapshot the begin info, and — for secondary buffers continuing a render pass —
// inherit the render pass / subpass / framebuffer state and rebuild the derived attachment
// tracking (active_subpasses, active_attachments) from the inherited framebuffer.
void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;

    // Beginning a previously recorded/completed buffer performs an implicit reset.
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo && (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
        // Deep-copy the inheritance info so the stored beginInfo doesn't point at caller memory.
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command-buffer and inheriting. Update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;

            // The inherited framebuffer is optional; rebuild attachment tracking only if present.
            if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
                cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
                // Clear stale derived state before repopulating it below.
                cb_state->active_subpasses = nullptr;
                cb_state->active_attachments = nullptr;

                if (cb_state->activeFramebuffer) {
                    cb_state->framebuffers.insert(cb_state->activeFramebuffer);

                    // Set cb_state->active_subpasses
                    cb_state->active_subpasses =
                        std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
                    const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
                    UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);

                    // Set cb_state->active_attachments & cb_state->attachments_view_states
                    cb_state->active_attachments =
                        std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
                    UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, nullptr);

                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, cb_state->activeFramebuffer.get());
                }
            }
        }
    }

    // Device-group mask: either the explicit mask from the chained struct, or all devices.
    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    // Remember whether the performance profiling lock was held when recording started.
    cb_state->performance_lock_acquired = performance_lock_acquired;
}
3664
3665void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
3666 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3667 if (!cb_state) return;
3668 // Cached validation is specific to a specific recording of a specific command buffer.
3669 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
3670 descriptor_set->ClearCachedValidation(cb_state);
3671 }
3672 cb_state->validated_descriptor_sets.clear();
3673 if (VK_SUCCESS == result) {
3674 cb_state->state = CB_RECORDED;
3675 }
3676}
3677
3678void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
3679 VkResult result) {
3680 if (VK_SUCCESS == result) {
3681 ResetCommandBufferState(commandBuffer);
3682 }
3683}
3684
3685CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
3686 // initially assume everything is static state
3687 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
3688
3689 if (ds) {
3690 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
locke-lunarg4189aa22020-10-21 00:23:48 -06003691 flags &= ~ConvertToCBStatusFlagBits(ds->pDynamicStates[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003692 }
3693 }
locke-lunargd556cc32019-09-17 01:21:23 -06003694 return flags;
3695}
3696
3697// Validation cache:
3698// CV is the bottommost implementor of this extension. Don't pass calls down.
3699// utility function to set collective state for pipeline
3700void SetPipelineState(PIPELINE_STATE *pPipe) {
3701 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
3702 if (pPipe->graphicsPipelineCI.pColorBlendState) {
3703 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
3704 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
3705 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3706 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3707 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3708 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3709 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3710 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3711 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3712 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
3713 pPipe->blendConstantsEnabled = true;
3714 }
3715 }
3716 }
3717 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07003718 // Check if sample location is enabled
3719 if (pPipe->graphicsPipelineCI.pMultisampleState) {
3720 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
3721 lvl_find_in_chain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
3722 if (sample_location_state != nullptr) {
3723 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
3724 }
3725 }
locke-lunargd556cc32019-09-17 01:21:23 -06003726}
3727
locke-lunargb8be8222020-10-20 00:34:37 -06003728void UpdateSamplerDescriptorsUsedByImage(LAST_BOUND_STATE &last_bound_state) {
3729 if (!last_bound_state.pipeline_state) return;
3730 if (last_bound_state.per_set.empty()) return;
3731
3732 for (auto &slot : last_bound_state.pipeline_state->active_slots) {
3733 for (auto &req : slot.second) {
3734 for (auto &samplers : req.second.samplers_used_by_image) {
3735 for (auto &sampler : samplers) {
3736 if (sampler.first.sampler_slot.first < last_bound_state.per_set.size() &&
3737 last_bound_state.per_set[sampler.first.sampler_slot.first].bound_descriptor_set) {
3738 sampler.second = last_bound_state.per_set[sampler.first.sampler_slot.first]
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003739 .bound_descriptor_set->GetDescriptorFromBinding(sampler.first.sampler_slot.second,
3740 sampler.first.sampler_index);
locke-lunargb8be8222020-10-20 00:34:37 -06003741 }
3742 }
3743 }
3744 }
3745 }
3746}
3747
// Record state for vkCmdBindPipeline: recompute the static/dynamic state masks for graphics
// pipelines, record the pipeline as last-bound for its bind point, and refresh the pipeline's
// sampler-used-by-image descriptor references against the currently bound descriptor sets.
void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
        // Order matters: clear the bits owned by the previously bound pipeline's static state,
        // then compute and set the new pipeline's static bits; everything else stays dynamic.
        cb_state->status &= ~cb_state->static_status;
        cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
        cb_state->status |= cb_state->static_status;
        cb_state->dynamic_status = CBSTATUS_ALL_STATE_SET & (~cb_state->static_status);
    }
    // Push constant data recorded under an incompatible layout must be discarded.
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
    const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
    cb_state->lastBound[lv_bind_point].pipeline_state = pipe_state;
    SetPipelineState(pipe_state);
    AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);

    // Clear any stale sampler-descriptor references cached on the pipeline before re-resolving
    // them against the descriptor sets currently bound at this bind point.
    for (auto &slot : pipe_state->active_slots) {
        for (auto &req : slot.second) {
            for (auto &sampler : req.second.samplers_used_by_image) {
                for (auto &des : sampler) {
                    des.second = nullptr;
                }
            }
        }
    }
    UpdateSamplerDescriptorsUsedByImage(cb_state->lastBound[lv_bind_point]);
}
3777
3778void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3779 uint32_t viewportCount, const VkViewport *pViewports) {
3780 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3781 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
3782 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003783 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003784}
3785
3786void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3787 uint32_t exclusiveScissorCount,
3788 const VkRect2D *pExclusiveScissors) {
3789 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3790 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3791 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3792 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003793 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003794}
3795
3796void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3797 VkImageLayout imageLayout) {
3798 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3799
3800 if (imageView != VK_NULL_HANDLE) {
3801 auto view_state = GetImageViewState(imageView);
3802 AddCommandBufferBindingImageView(cb_state, view_state);
3803 }
3804}
3805
3806void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3807 uint32_t viewportCount,
3808 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3809 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3810 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3811 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3812 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003813 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003814}
3815
// Create state tracking for a freshly created NV acceleration structure and eagerly cache
// its three memory-requirement queries (object, build scratch, update scratch).
void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
                                                                         const VkAccelerationStructureCreateInfoNV *pCreateInfo,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkAccelerationStructureNV *pAccelerationStructure,
                                                                         VkResult result) {
    // Only track objects that were actually created.
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);

    // Scratch requirements for the initial build.
    VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
                                                         &as_state->build_scratch_memory_requirements);

    // Scratch requirements for subsequent update builds.
    VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
                                                         &as_state->update_scratch_memory_requirements);
    as_state->allocator = pAllocator;
    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}
3847
Jeff Bolz95176d02020-04-01 00:36:16 -05003848void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3849 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3850 const VkAllocationCallbacks *pAllocator,
3851 VkAccelerationStructureKHR *pAccelerationStructure,
3852 VkResult result) {
3853 if (VK_SUCCESS != result) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003854 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE_KHR>(*pAccelerationStructure, pCreateInfo);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003855 as_state->allocator = pAllocator;
sourav parmarcd5fb182020-07-17 12:58:44 -07003856 accelerationStructureMap_khr[*pAccelerationStructure] = std::move(as_state);
Jeff Bolz95176d02020-04-01 00:36:16 -05003857}
3858
sourav parmarcd5fb182020-07-17 12:58:44 -07003859void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresKHR(
3860 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3861 const VkAccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos) {
3862 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3863 if (cb_state == nullptr) {
3864 return;
3865 }
3866 for (uint32_t i = 0; i < infoCount; ++i) {
3867 ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
3868 ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3869 if (dst_as_state != nullptr) {
3870 dst_as_state->built = true;
3871 dst_as_state->build_info_khr.initialize(&pInfos[i]);
3872 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3873 }
3874 if (src_as_state != nullptr) {
3875 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3876 }
3877 }
3878 cb_state->hasBuildAccelerationStructureCmd = true;
3879}
3880
3881void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresIndirectKHR(
3882 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3883 const VkDeviceAddress *pIndirectDeviceAddresses, const uint32_t *pIndirectStrides,
3884 const uint32_t *const *ppMaxPrimitiveCounts) {
3885 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3886 if (cb_state == nullptr) {
3887 return;
3888 }
3889 for (uint32_t i = 0; i < infoCount; ++i) {
3890 ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
3891 ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3892 if (dst_as_state != nullptr) {
3893 dst_as_state->built = true;
3894 dst_as_state->build_info_khr.initialize(&pInfos[i]);
3895 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3896 }
3897 if (src_as_state != nullptr) {
3898 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3899 }
3900 }
3901 cb_state->hasBuildAccelerationStructureCmd = true;
3902}
locke-lunargd556cc32019-09-17 01:21:23 -06003903void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
Mike Schuchardt2df08912020-12-15 16:28:09 -08003904 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2 *pMemoryRequirements) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003905 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(pInfo->accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003906 if (as_state != nullptr) {
3907 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3908 as_state->memory_requirements = *pMemoryRequirements;
3909 as_state->memory_requirements_checked = true;
3910 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3911 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3912 as_state->build_scratch_memory_requirements_checked = true;
3913 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3914 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3915 as_state->update_scratch_memory_requirements_checked = true;
3916 }
3917 }
3918}
3919
// Record successful memory binds for NV acceleration structures: memory-range bookkeeping,
// object<->memory linkage, and (under GPU-AV) the opaque handle needed for TLAS build validation.
void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];

        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(info.accelerationStructure);
        if (as_state) {
            // Track bound memory range information
            auto mem_info = GetDevMemState(info.memory);
            if (mem_info) {
                InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
            }
            // Track objects tied to memory
            SetMemBinding(info.memory, as_state, info.memoryOffset,
                          VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));

            // GPU validation of top level acceleration structure building needs acceleration structure handles.
            // XXX TODO: Query device address for KHR extension
            if (enabled[gpu_validation]) {
                // 8 == sizeof(uint64_t), the size of the opaque handle being written.
                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
            }
        }
    }
}
3945
3946void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3947 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3948 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3949 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3950 if (cb_state == nullptr) {
3951 return;
3952 }
3953
sourav parmarcd5fb182020-07-17 12:58:44 -07003954 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
3955 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
locke-lunargd556cc32019-09-17 01:21:23 -06003956 if (dst_as_state != nullptr) {
3957 dst_as_state->built = true;
3958 dst_as_state->build_info.initialize(pInfo);
3959 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3960 }
3961 if (src_as_state != nullptr) {
3962 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3963 }
3964 cb_state->hasBuildAccelerationStructureCmd = true;
3965}
3966
3967void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3968 VkAccelerationStructureNV dst,
3969 VkAccelerationStructureNV src,
3970 VkCopyAccelerationStructureModeNV mode) {
3971 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3972 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003973 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
3974 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003975 if (dst_as_state != nullptr && src_as_state != nullptr) {
3976 dst_as_state->built = true;
3977 dst_as_state->build_info = src_as_state->build_info;
3978 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3979 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3980 }
3981 }
3982}
3983
// Tear down tracking for a KHR acceleration structure: invalidate recording command buffers,
// unbind it from any device memory, and drop it from the KHR tracking map.
void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
                                                                          VkAccelerationStructureKHR accelerationStructure,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!accelerationStructure) return;
    auto *as_state = GetAccelerationStructureStateKHR(accelerationStructure);
    if (as_state) {
        const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
        // Any command buffer that recorded a reference to this object is now invalid.
        InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
        // Remove range tracking for each memory object it was bound to, then clear the bindings.
        for (auto mem_binding : as_state->GetBoundMemory()) {
            RemoveMemoryRange(obj_struct, mem_binding);
        }
        ClearMemoryObjectBindings(obj_struct);
        // Mark destroyed before erasing; other shared_ptr holders can still observe the flag.
        as_state->destroyed = true;
        accelerationStructureMap_khr.erase(accelerationStructure);
    }
}
4000
Jeff Bolz95176d02020-04-01 00:36:16 -05004001void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
4002 VkAccelerationStructureNV accelerationStructure,
4003 const VkAllocationCallbacks *pAllocator) {
sourav parmarcd5fb182020-07-17 12:58:44 -07004004 if (!accelerationStructure) return;
4005 auto *as_state = GetAccelerationStructureStateNV(accelerationStructure);
4006 if (as_state) {
4007 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
4008 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
4009 for (auto mem_binding : as_state->GetBoundMemory()) {
4010 RemoveMemoryRange(obj_struct, mem_binding);
4011 }
4012 ClearMemoryObjectBindings(obj_struct);
4013 as_state->destroyed = true;
4014 accelerationStructureMap.erase(accelerationStructure);
4015 }
Jeff Bolz95176d02020-04-01 00:36:16 -05004016}
4017
Chris Mayer9ded5eb2019-09-19 16:33:26 +02004018void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
4019 uint32_t viewportCount,
4020 const VkViewportWScalingNV *pViewportWScalings) {
4021 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4022 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004023 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02004024}
4025
locke-lunargd556cc32019-09-17 01:21:23 -06004026void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
4027 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4028 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004029 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004030}
4031
4032void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
4033 uint16_t lineStipplePattern) {
4034 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4035 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004036 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004037}
4038
4039void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
4040 float depthBiasClamp, float depthBiasSlopeFactor) {
4041 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4042 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004043 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004044}
4045
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004046void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
4047 const VkRect2D *pScissors) {
4048 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4049 cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
4050 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004051 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004052}
4053
locke-lunargd556cc32019-09-17 01:21:23 -06004054void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
4055 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4056 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004057 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004058}
4059
4060void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
4061 float maxDepthBounds) {
4062 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4063 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004064 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004065}
4066
4067void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
4068 uint32_t compareMask) {
4069 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4070 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004071 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004072}
4073
4074void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
4075 uint32_t writeMask) {
4076 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4077 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004078 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004079}
4080
4081void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
4082 uint32_t reference) {
4083 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4084 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004085 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004086}
4087
// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
//
// Sets [first_set, first_set + set_count) become bound; any previously bound set whose
// canonical compat id differs from the new layout's is invalidated (Vulkan spec:
// "Pipeline Layout Compatibility"). Dynamic offsets are consumed from p_dynamic_offsets
// in set order.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    const auto lv_bind_point = ConvertToLvlBindPoint(pipeline_bind_point);
    auto &last_bound = cb_state->lastBound[lv_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else
    // Returns true (and releases the tracked push descriptor set) iff 'ds' is the push descriptor set.
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                // Consume this set's span of the flat dynamic-offset array.
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
4182
// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                                                                VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                uint32_t firstSet, uint32_t setCount,
                                                                const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                                const uint32_t *pDynamicOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pipeline_layout = GetPipelineLayout(layout);

    // Resize binding arrays
    uint32_t last_set_index = firstSet + setCount - 1;
    const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
    if (last_set_index >= cb_state->lastBound[lv_bind_point].per_set.size()) {
        cb_state->lastBound[lv_bind_point].per_set.resize(last_set_index + 1);
    }

    // Apply the pipeline-layout-compatibility rules and record the new bindings (nullptr
    // push_descriptor_set marks this as the CmdBindDescriptorSets path).
    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
                                  dynamicOffsetCount, pDynamicOffsets);
    cb_state->lastBound[lv_bind_point].pipeline_layout = layout;
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
    UpdateSamplerDescriptorsUsedByImage(cb_state->lastBound[lv_bind_point]);
}
4205
// Shared recording logic for vkCmdPushDescriptorSetKHR: (re)create the tracked push
// descriptor set if needed, bind it at 'set', and apply the write updates to it.
void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor()) {
        return;
    }

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
    auto &last_bound = cb_state->lastBound[lv_bind_point];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_desriptor_set clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        // NOTE(review): the raw 'new' appears to transfer ownership into last_bound's smart pointer — confirm
        // UnbindAndResetPushDescriptorSet takes ownership.
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}
4233
4234void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
4235 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
4236 uint32_t set, uint32_t descriptorWriteCount,
4237 const VkWriteDescriptorSet *pDescriptorWrites) {
4238 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4239 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
4240}
4241
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06004242void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
4243 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
4244 const void *pValues) {
4245 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4246 if (cb_state != nullptr) {
4247 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
4248
4249 auto &push_constant_data = cb_state->push_constant_data;
4250 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
4251 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06004252 cb_state->push_constant_pipeline_layout_set = layout;
4253
4254 auto flags = stageFlags;
4255 uint32_t bit_shift = 0;
4256 while (flags) {
4257 if (flags & 1) {
4258 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
4259 const auto it = cb_state->push_constant_data_update.find(flag);
4260
4261 if (it != cb_state->push_constant_data_update.end()) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06004262 std::memset(it->second.data() + offset, PC_Byte_Updated, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06004263 }
4264 }
4265 flags = flags >> 1;
4266 ++bit_shift;
4267 }
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06004268 }
4269}
4270
// Track the index buffer binding (buffer, offset, type) on the command buffer.
void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                             VkIndexType indexType) {
    auto cb_state = GetCBState(commandBuffer);

    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
    // NOTE(review): buffer_state is dereferenced without a null check — assumes 'buffer' is a
    // valid, known handle at this point; confirm parameter validation guarantees this.
    cb_state->index_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(buffer);
    cb_state->index_buffer_binding.size = cb_state->index_buffer_binding.buffer_state->createInfo.size;
    cb_state->index_buffer_binding.offset = offset;
    cb_state->index_buffer_binding.index_type = indexType;
    // Add binding for this index buffer to this commandbuffer
    AddCommandBufferBindingBuffer(cb_state, cb_state->index_buffer_binding.buffer_state.get());
}
4284
// Track vertex buffer bindings [firstBinding, firstBinding + bindingCount) on the command buffer.
void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                               uint32_t bindingCount, const VkBuffer *pBuffers,
                                                               const VkDeviceSize *pOffsets) {
    auto cb_state = GetCBState(commandBuffer);

    // Grow the binding array to cover the highest binding touched by this call.
    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        // pBuffers[i] may be VK_NULL_HANDLE, in which case buffer_state is a null shared_ptr.
        vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
        vertex_buffer_binding.offset = pOffsets[i];
        // This entry point has no size/stride parameters (unlike CmdBindVertexBuffers2EXT).
        vertex_buffer_binding.size = VK_WHOLE_SIZE;
        vertex_buffer_binding.stride = 0;
        // Add binding for this vertex buffer to this commandbuffer
        if (pBuffers[i]) {
            AddCommandBufferBindingBuffer(cb_state, vertex_buffer_binding.buffer_state.get());
        }
    }
}
4307
4308void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
4309 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
4310 auto cb_state = GetCBState(commandBuffer);
4311 auto dst_buffer_state = GetBufferState(dstBuffer);
4312
4313 // Update bindings between buffer and cmd buffer
4314 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
4315}
4316
Jeff Bolz310775c2019-10-09 00:46:33 -05004317bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
4318 EventToStageMap *localEventToStageMap) {
4319 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06004320 return false;
4321}
4322
// Track vkCmdSetEvent: bind the event to the command buffer and queue a deferred update
// of the event's stage mask to run at submit time.
void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    // Only record a "write before wait" if this command buffer hasn't already waited on the event.
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    // Defer the actual map update; the lambda is replayed against a per-submit event map.
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}
4339
// Track vkCmdResetEvent: bind the event to the command buffer and queue a deferred update
// that clears the event's stage mask (to 0) at submit time.
void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    // Only record a "write before wait" if this command buffer hasn't already waited on the event.
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }

    // Reset maps to an empty stage mask; replayed against the per-submit event map.
    cb_state->eventUpdates.emplace_back(
        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
        });
}
4357
4358void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
4359 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
4360 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
4361 uint32_t bufferMemoryBarrierCount,
4362 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
4363 uint32_t imageMemoryBarrierCount,
4364 const VkImageMemoryBarrier *pImageMemoryBarriers) {
4365 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4366 for (uint32_t i = 0; i < eventCount; ++i) {
4367 auto event_state = GetEventState(pEvents[i]);
4368 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05004369 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
4370 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004371 }
4372 cb_state->waitedEvents.insert(pEvents[i]);
4373 cb_state->events.push_back(pEvents[i]);
4374 }
4375}
4376
Jeff Bolz310775c2019-10-09 00:46:33 -05004377bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
4378 (*localQueryToStateMap)[object] = value;
4379 return false;
4380}
4381
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004382bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
4383 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05004384 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004385 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05004386 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06004387 }
4388 return false;
4389}
4390
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004391QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
4392 uint32_t perfPass) const {
4393 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06004394
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004395 auto iter = localQueryToStateMap->find(query);
4396 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05004397
Jeff Bolz310775c2019-10-09 00:46:33 -05004398 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06004399}
4400
// Shared state-tracking for query-begin commands: marks the query active/started on the
// command buffer and defers the QUERYSTATE_RUNNING write until submit time, when the
// performance-query pass index (perfQueryPass) is actually known.
void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled[query_validation]) return;  // Query state tracking disabled by layer settings
    cb_state->activeQueries.insert(query_obj);
    cb_state->startedQueries.insert(query_obj);
    // Deferred update, executed against the queue-local query-state map at submit time.
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
        return false;  // Record-only; no validation error from this update
    });
    // Link the query pool to this command buffer for object-lifetime tracking.
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}
4415
4416void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
4417 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004418 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004419 QueryObject query = {queryPool, slot};
4420 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4421 RecordCmdBeginQuery(cb_state, query);
4422}
4423
// Shared state-tracking for query-end commands: removes the query from the active set and
// defers the QUERYSTATE_ENDED write until submit time, when the perf-query pass is known.
void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled[query_validation]) return;  // Query state tracking disabled by layer settings
    cb_state->activeQueries.erase(query_obj);
    // Deferred update, executed against the queue-local query-state map at submit time.
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
    // Link the query pool to this command buffer for object-lifetime tracking.
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}
4436
4437void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004438 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004439 QueryObject query_obj = {queryPool, slot};
4440 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4441 RecordCmdEndQuery(cb_state, query_obj);
4442}
4443
// Tracks vkCmdResetQueryPool: remembers which queries this command buffer resets and defers
// the QUERYSTATE_RESET writes (one per query, per perf pass) until submit time.
void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled[query_validation]) return;  // Query state tracking disabled by layer settings
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    // Record the individual queries reset by this command buffer.
    for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
        QueryObject query = {queryPool, slot};
        cb_state->resetQueries.insert(query);
    }

    // Deferred update, executed against the queue-local query-state map at submit time.
    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, VkQueryPool &firstPerfQueryPool,
                                                                            uint32_t perfQueryPass,
                                                                            QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
    });
    // Link the query pool to this command buffer for object-lifetime tracking.
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}
4464
4465void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4466 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
4467 VkDeviceSize dstOffset, VkDeviceSize stride,
4468 VkQueryResultFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004469 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004470 auto cb_state = GetCBState(commandBuffer);
4471 auto dst_buff_state = GetBufferState(dstBuffer);
4472 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05004473 auto pool_state = GetQueryPoolState(queryPool);
4474 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004475 cb_state);
4476}
4477
// Tracks vkCmdWriteTimestamp: binds the query pool to this command buffer and defers the
// QUERYSTATE_ENDED write (a timestamp both begins and ends the query) until submit time.
void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    if (disabled[query_validation]) return;  // Query state tracking disabled by layer settings
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // Link the query pool to this command buffer for object-lifetime tracking.
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    // Deferred update, executed against the queue-local query-state map at submit time.
    cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
                                                VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
}
4492
4493void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
4494 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
4495 VkResult result) {
4496 if (VK_SUCCESS != result) return;
4497 // Shadow create info and store in map
Jeff Bolz6ae39612019-10-11 20:57:36 -05004498 auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));
locke-lunargd556cc32019-09-17 01:21:23 -06004499
Mike Schuchardt2df08912020-12-15 16:28:09 -08004500 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) == 0) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07004501 fb_state->attachments_view_state.resize(pCreateInfo->attachmentCount);
4502
locke-lunargd556cc32019-09-17 01:21:23 -06004503 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07004504 fb_state->attachments_view_state[i] = GetShared<IMAGE_VIEW_STATE>(pCreateInfo->pAttachments[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06004505 }
4506 }
4507 frameBufferMap[*pFramebuffer] = std::move(fb_state);
4508}
4509
// Builds the subpass dependency graph (DAG) for a render pass:
//  - subpassToNode: prev/next adjacency between subpasses (self-dependencies excluded),
//  - self_dependencies: per-subpass indices of dependencies where src == dst,
//  - subpass_dependencies: barriers from/to VK_SUBPASS_EXTERNAL and inter-subpass edges,
//    plus the list of subpasses that may run asynchronously relative to each subpass.
// NOTE(review): rp_version is currently unused in this function — the create info has
// already been normalized to the VkRenderPassCreateInfo2 form; confirm before relying on it.
void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2 *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);
    auto &subpass_dependencies = render_pass->subpass_dependencies;
    subpass_dependencies.resize(pCreateInfo->subpassCount);

    // Initialize each node with its own subpass index.
    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
        subpass_dependencies[i].pass = i;
    }
    // Classify every declared dependency: self, internal edge, or external barrier.
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const auto &dependency = pCreateInfo->pDependencies[i];
        const auto src_subpass = dependency.srcSubpass;
        const auto dst_subpass = dependency.dstSubpass;
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
        if (src_subpass == VK_SUBPASS_EXTERNAL) {
            assert(dst_subpass != VK_SUBPASS_EXTERNAL);  // this is invalid per VUID-VkSubpassDependency-srcSubpass-00865
            subpass_dependencies[dst_subpass].barrier_from_external.emplace_back(&dependency);
        } else if (dst_subpass == VK_SUBPASS_EXTERNAL) {
            subpass_dependencies[src_subpass].barrier_to_external.emplace_back(&dependency);
        } else if (dependency.srcSubpass != dependency.dstSubpass) {
            // ignore self dependencies in prev and next
            subpass_dependencies[src_subpass].next[&subpass_dependencies[dst_subpass]].emplace_back(&dependency);
            subpass_dependencies[dst_subpass].prev[&subpass_dependencies[src_subpass]].emplace_back(&dependency);
        }
    }

    //
    // Determine "asynchronous" subpasses
    // synchronization is only interested in asynchronous stages *earlier* than the current one... so we'll only look towards those.
    // NOTE: This is O(N^3), which we could shrink to O(N^2logN) using sets instead of arrays, but given that N is likely to be
    // small and the K for |= from the prev is much less than for set, we'll accept the brute force.
    std::vector<std::vector<bool>> pass_depends(pCreateInfo->subpassCount);
    for (uint32_t i = 1; i < pCreateInfo->subpassCount; ++i) {
        auto &depends = pass_depends[i];
        depends.resize(i);
        auto &subpass_dep = subpass_dependencies[i];
        // Transitive closure over the prev edges: depends[j] == true iff subpass j must
        // happen-before subpass i via some chain of declared dependencies.
        for (const auto &prev : subpass_dep.prev) {
            const auto prev_pass = prev.first->pass;
            const auto &prev_depends = pass_depends[prev_pass];
            for (uint32_t j = 0; j < prev_pass; j++) {
                depends[j] = depends[j] | prev_depends[j];
            }
            depends[prev_pass] = true;
        }
        // Any earlier subpass with no ordering relation may execute asynchronously.
        for (uint32_t pass = 0; pass < subpass_dep.pass; pass++) {
            if (!depends[pass]) {
                subpass_dep.async.push_back(pass);
            }
        }
    }
}
4573
John Zulauf4aff5d92020-02-21 08:29:35 -07004574static VkSubpassDependency2 ImplicitDependencyFromExternal(uint32_t subpass) {
4575 VkSubpassDependency2 from_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4576 nullptr,
4577 VK_SUBPASS_EXTERNAL,
4578 subpass,
4579 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
4580 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4581 0,
4582 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4583 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4584 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4585 0,
4586 0};
4587 return from_external;
4588}
4589
4590static VkSubpassDependency2 ImplicitDependencyToExternal(uint32_t subpass) {
4591 VkSubpassDependency2 to_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4592 nullptr,
4593 subpass,
4594 VK_SUBPASS_EXTERNAL,
4595 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4596 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
4597 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4598 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4599 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4600 0,
4601 0,
4602 0};
4603 return to_external;
4604}
4605
// Finishes construction of RENDER_PASS_STATE after a successful vkCreateRenderPass(2):
// records the DAG, per-attachment first/last subpass use, layout transitions, implicit
// external dependencies, and finally registers the state in renderPassMap.
// NOTE(review): RecordRenderPassDAG is always called with RENDER_PASS_VERSION_1 even though
// rp_version is passed in — the create info is already normalized to the *2 form, so the
// version looks irrelevant to the DAG; confirm before changing.
void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
                                                         VkRenderPass *pRenderPass) {
    render_pass->renderPass = *pRenderPass;
    auto create_info = render_pass->createInfo.ptr();

    RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());

    // Tracks, per attachment: the first/last subpass that uses it, whether it is first read
    // or written, its current layout as subpasses are walked, and the layout transitions
    // each subpass (plus a trailing "EndRenderPass" slot) must perform.
    struct AttachmentTracker {  // This is really only of local interest, but a bit big for a lambda
        RENDER_PASS_STATE *const rp;
        std::vector<uint32_t> &first;
        std::vector<bool> &first_is_transition;
        std::vector<uint32_t> &last;
        std::vector<std::vector<RENDER_PASS_STATE::AttachmentTransition>> &subpass_transitions;
        std::unordered_map<uint32_t, bool> &first_read;
        const uint32_t attachment_count;
        std::vector<VkImageLayout> attachment_layout;               // current layout per attachment
        std::vector<std::vector<VkImageLayout>> subpass_attachment_layout;  // [subpass][attachment]
        explicit AttachmentTracker(std::shared_ptr<RENDER_PASS_STATE> &render_pass)
            : rp(render_pass.get()),
              first(rp->attachment_first_subpass),
              first_is_transition(rp->attachment_first_is_transition),
              last(rp->attachment_last_subpass),
              subpass_transitions(rp->subpass_transitions),
              first_read(rp->attachment_first_read),
              attachment_count(rp->createInfo.attachmentCount),
              attachment_layout(),
              subpass_attachment_layout() {
            first.resize(attachment_count, VK_SUBPASS_EXTERNAL);
            first_is_transition.resize(attachment_count, false);
            last.resize(attachment_count, VK_SUBPASS_EXTERNAL);
            subpass_transitions.resize(rp->createInfo.subpassCount + 1);  // Add an extra for EndRenderPass
            attachment_layout.reserve(attachment_count);
            subpass_attachment_layout.resize(rp->createInfo.subpassCount);
            for (auto &subpass_layouts : subpass_attachment_layout) {
                subpass_layouts.resize(attachment_count, kInvalidLayout);
            }

            // Every attachment starts the render pass in its declared initial layout.
            for (uint32_t j = 0; j < attachment_count; j++) {
                attachment_layout.push_back(rp->createInfo.pAttachments[j].initialLayout);
            }
        }

        // Folds one attachment-reference array from a subpass into the tracked state.
        void Update(uint32_t subpass, const VkAttachmentReference2 *attach_ref, uint32_t count, bool is_read) {
            if (nullptr == attach_ref) return;
            for (uint32_t j = 0; j < count; ++j) {
                const auto attachment = attach_ref[j].attachment;
                if (attachment != VK_ATTACHMENT_UNUSED) {
                    const auto layout = attach_ref[j].layout;
                    // Take advantage of the fact that insert won't overwrite, so we'll only write the first time.
                    first_read.insert(std::make_pair(attachment, is_read));
                    if (first[attachment] == VK_SUBPASS_EXTERNAL) {
                        // First use of this attachment: record an initial-layout transition if needed.
                        first[attachment] = subpass;
                        const auto initial_layout = rp->createInfo.pAttachments[attachment].initialLayout;
                        if (initial_layout != layout) {
                            subpass_transitions[subpass].emplace_back(VK_SUBPASS_EXTERNAL, attachment, initial_layout, layout);
                            first_is_transition[attachment] = true;
                        }
                    }
                    last[attachment] = subpass;

                    // Record transitions from any dependent prior subpass whose layout differs.
                    for (const auto &prev : rp->subpass_dependencies[subpass].prev) {
                        const auto prev_pass = prev.first->pass;
                        const auto prev_layout = subpass_attachment_layout[prev_pass][attachment];
                        if ((prev_layout != kInvalidLayout) && (prev_layout != layout)) {
                            subpass_transitions[subpass].emplace_back(prev_pass, attachment, prev_layout, layout);
                        }
                    }
                    attachment_layout[attachment] = layout;
                }
            }
        }
        // Records the transitions to each attachment's declared final layout in the extra
        // "EndRenderPass" slot of subpass_transitions.
        void FinalTransitions() {
            auto &final_transitions = subpass_transitions[rp->createInfo.subpassCount];

            for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
                const auto final_layout = rp->createInfo.pAttachments[attachment].finalLayout;
                // Add final transitions for attachments that were used and change layout.
                if ((last[attachment] != VK_SUBPASS_EXTERNAL) && final_layout != attachment_layout[attachment]) {
                    final_transitions.emplace_back(last[attachment], attachment, attachment_layout[attachment], final_layout);
                }
            }
        }
    };
    AttachmentTracker attachment_tracker(render_pass);

    // Walk every attachment reference of every subpass in declaration order.
    for (uint32_t subpass_index = 0; subpass_index < create_info->subpassCount; ++subpass_index) {
        const VkSubpassDescription2 &subpass = create_info->pSubpasses[subpass_index];
        attachment_tracker.Update(subpass_index, subpass.pColorAttachments, subpass.colorAttachmentCount, false);
        attachment_tracker.Update(subpass_index, subpass.pResolveAttachments, subpass.colorAttachmentCount, false);
        attachment_tracker.Update(subpass_index, subpass.pDepthStencilAttachment, 1, false);
        attachment_tracker.Update(subpass_index, subpass.pInputAttachments, subpass.inputAttachmentCount, true);
    }
    attachment_tracker.FinalTransitions();

    // Add implicit dependencies (spec-defined external barriers for first/last attachment use
    // when the application declared none).
    for (uint32_t attachment = 0; attachment < attachment_tracker.attachment_count; attachment++) {
        const auto first_use = attachment_tracker.first[attachment];
        if (first_use != VK_SUBPASS_EXTERNAL) {
            auto &subpass_dep = render_pass->subpass_dependencies[first_use];
            if (subpass_dep.barrier_from_external.size() == 0) {
                // Add implicit from-external barrier if there aren't any explicit ones
                subpass_dep.implicit_barrier_from_external.reset(
                    new VkSubpassDependency2(ImplicitDependencyFromExternal(first_use)));
                subpass_dep.barrier_from_external.emplace_back(subpass_dep.implicit_barrier_from_external.get());
            }
        }

        const auto last_use = attachment_tracker.last[attachment];
        if (last_use != VK_SUBPASS_EXTERNAL) {
            auto &subpass_dep = render_pass->subpass_dependencies[last_use];
            if (render_pass->subpass_dependencies[last_use].barrier_to_external.size() == 0) {
                // Add implicit to-external barrier if there aren't any explicit ones
                subpass_dep.implicit_barrier_to_external.reset(new VkSubpassDependency2(ImplicitDependencyToExternal(last_use)));
                subpass_dep.barrier_to_external.emplace_back(subpass_dep.implicit_barrier_to_external.get());
            }
        }
    }

    // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
    renderPassMap[*pRenderPass] = std::move(render_pass);
}
4728
// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in google style guide, but intentionally forces caller to move
// or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
// construction or assignment.
4733void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
4734 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4735 VkResult result) {
4736 if (VK_SUCCESS != result) return;
4737 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4738 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
4739}
4740
Mike Schuchardt2df08912020-12-15 16:28:09 -08004741void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07004742 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4743 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004744 if (VK_SUCCESS != result) return;
4745 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4746 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
4747}
4748
// KHR-suffixed alias of vkCreateRenderPass2 — forwards to the shared record helper.
void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}
4754
// Core (Vulkan 1.2) vkCreateRenderPass2 entry point — forwards to the shared record helper.
void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                             VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}
4760
// Shared record path for all vkCmdBeginRenderPass variants: activates the render pass on
// the command buffer, captures the begin info, device mask, active framebuffer, and builds
// per-subpass/per-attachment tracking state for subpass 0.
void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        // Deep-copy the begin info (including pNext chain) so it outlives the caller's pointer.
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;

        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(
            render_pass_state->cb_bindings,
            VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state.get()), cb_state);

        // Device-group rendering: use the chained device mask if present, else the CB's initial mask.
        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }

        // Reset before (re)building; framebuffer may be null (e.g. invalid handle).
        cb_state->active_subpasses = nullptr;
        cb_state->active_attachments = nullptr;

        if (framebuffer) {
            cb_state->framebuffers.insert(framebuffer);

            // Set cb_state->active_subpasses
            cb_state->active_subpasses =
                std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
            const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
            UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);

            // Set cb_state->active_attachments & cb_state->attachments_view_states
            cb_state->active_attachments =
                std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(framebuffer->createInfo.attachmentCount);
            UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, pRenderPassBegin);

            // Connect this framebuffer and its children to this cmdBuffer
            AddFramebufferBinding(cb_state, framebuffer.get());
        }
    }
}
4809
// vkCmdBeginRenderPass (v1) — forwards to the shared begin-render-pass recorder.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}
4815
// vkCmdBeginRenderPass2KHR — the contents field moves into pSubpassBeginInfo; forward it.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfo *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}
4821
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06004822void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4823 uint32_t counterBufferCount,
4824 const VkBuffer *pCounterBuffers,
4825 const VkDeviceSize *pCounterBufferOffsets) {
4826 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4827
4828 cb_state->transform_feedback_active = true;
4829}
4830
4831void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4832 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
4833 const VkDeviceSize *pCounterBufferOffsets) {
4834 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4835
4836 cb_state->transform_feedback_active = false;
4837}
4838
// vkCmdBeginRenderPass2 (core 1.2) — the contents field moves into pSubpassBeginInfo; forward it.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfo *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}
4844
locke-lunargd556cc32019-09-17 01:21:23 -06004845void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4846 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4847 cb_state->activeSubpass++;
4848 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07004849
4850 // Update cb_state->active_subpasses
4851 if (cb_state->activeRenderPass && cb_state->activeFramebuffer) {
4852 cb_state->active_subpasses = nullptr;
4853 cb_state->active_subpasses =
4854 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
4855
4856 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
4857 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
4858 }
locke-lunargd556cc32019-09-17 01:21:23 -06004859}
4860
// vkCmdNextSubpass: forward to the shared subpass-advance recorder.
void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}
4864
// KHR alias of vkCmdNextSubpass2: forward to the shared subpass-advance recorder,
// using the contents from the begin info (end info is unused for state tracking).
void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfo *pSubpassBeginInfo,
                                                              const VkSubpassEndInfo *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}
4870
Tony-LunarG977448c2019-12-02 14:52:02 -07004871void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004872 const VkSubpassBeginInfo *pSubpassBeginInfo,
4873 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004874 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4875}
4876
locke-lunargd556cc32019-09-17 01:21:23 -06004877void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
4878 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4879 cb_state->activeRenderPass = nullptr;
locke-lunargfc78e932020-11-19 17:06:24 -07004880 cb_state->active_attachments = nullptr;
4881 cb_state->active_subpasses = nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004882 cb_state->activeSubpass = 0;
4883 cb_state->activeFramebuffer = VK_NULL_HANDLE;
4884}
4885
// vkCmdEndRenderPass: forward to the shared end-render-pass recorder.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}
4889
// KHR alias of vkCmdEndRenderPass2: forward to the shared end-render-pass recorder.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfo *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}
4894
Tony-LunarG977448c2019-12-02 14:52:02 -07004895void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004896 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004897 RecordCmdEndRenderPassState(commandBuffer);
4898}
locke-lunargd556cc32019-09-17 01:21:23 -06004899void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
4900 const VkCommandBuffer *pCommandBuffers) {
4901 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4902
4903 CMD_BUFFER_STATE *sub_cb_state = NULL;
4904 for (uint32_t i = 0; i < commandBuffersCount; i++) {
4905 sub_cb_state = GetCBState(pCommandBuffers[i]);
4906 assert(sub_cb_state);
4907 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
4908 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
4909 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
4910 // from the validation step to the recording step
4911 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
4912 }
4913 }
4914
4915 // Propagate inital layout and current layout state to the primary cmd buffer
4916 // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
4917 // ValidationStateTracker these maps will be empty, so leaving the propagation in the the state tracker should be a no-op
4918 // for those other classes.
4919 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
4920 const auto image = sub_layout_map_entry.first;
4921 const auto *image_state = GetImageState(image);
4922 if (!image_state) continue; // Can't set layouts of a dead image
4923
4924 auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
4925 const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
4926 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
4927 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
4928 }
4929
4930 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
4931 cb_state->linkedCommandBuffers.insert(sub_cb_state);
4932 sub_cb_state->linkedCommandBuffers.insert(cb_state);
4933 for (auto &function : sub_cb_state->queryUpdates) {
4934 cb_state->queryUpdates.push_back(function);
4935 }
4936 for (auto &function : sub_cb_state->queue_submit_functions) {
4937 cb_state->queue_submit_functions.push_back(function);
4938 }
4939 }
4940}
4941
// vkMapMemory: on success, record the mapped range and host pointer.
void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}
4947
4948void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4949 auto mem_info = GetDevMemState(mem);
4950 if (mem_info) {
4951 mem_info->mapped_range = MemRange();
4952 mem_info->p_driver_data = nullptr;
4953 }
4954}
4955
// Shared bookkeeping for vkBindImageMemory(2/2KHR): records either the
// swapchain binding (when VkBindImageMemorySwapchainInfoKHR is chained) or the
// device-memory binding, and registers aliasing for ALIAS_BIT/swapchain images.
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // An Android special image cannot get VkSubresourceLayout until the image binds a memory.
        // See: VUID-vkGetImageSubresourceLayout-image-01895
        image_state->fragment_encoder =
            std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            // Image is backed by a swapchain image rather than a VkDeviceMemory allocation
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
            AddAliasingImage(image_state);
        }
    }
}
4987
4988void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
4989 VkDeviceSize memoryOffset, VkResult result) {
4990 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004991 VkBindImageMemoryInfo bind_info = {};
4992 bind_info.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
4993 bind_info.image = image;
4994 bind_info.memory = mem;
4995 bind_info.memoryOffset = memoryOffset;
4996 UpdateBindImageMemoryState(bind_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004997}
4998
4999void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005000 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005001 if (VK_SUCCESS != result) return;
5002 for (uint32_t i = 0; i < bindInfoCount; i++) {
5003 UpdateBindImageMemoryState(pBindInfos[i]);
5004 }
5005}
5006
5007void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005008 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005009 if (VK_SUCCESS != result) return;
5010 for (uint32_t i = 0; i < bindInfoCount; i++) {
5011 UpdateBindImageMemoryState(pBindInfos[i]);
5012 }
5013}
5014
5015void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
5016 auto event_state = GetEventState(event);
5017 if (event_state) {
5018 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
5019 }
locke-lunargd556cc32019-09-17 01:21:23 -06005020}
5021
// vkImportSemaphoreFdKHR: on success, record the import's handle type and flags.
void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}
5029
5030void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005031 VkExternalSemaphoreHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06005032 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
Mike Schuchardt2df08912020-12-15 16:28:09 -08005033 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06005034 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
5035 semaphore_state->scope = kSyncScopeExternalPermanent;
5036 }
5037}
5038
5039#ifdef VK_USE_PLATFORM_WIN32_KHR
// vkImportSemaphoreWin32HandleKHR: on success, record the import's handle type and flags.
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}
5046
// vkGetSemaphoreWin32HandleKHR: on success, record the export's handle type.
void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}
5053
// vkImportFenceWin32HandleKHR: on success, record the import's handle type and flags.
void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}
5060
// vkGetFenceWin32HandleKHR: on success, record the export's handle type.
void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
5067#endif
5068
// vkGetSemaphoreFdKHR: on success, record the export's handle type.
void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}
5074
Mike Schuchardt2df08912020-12-15 16:28:09 -08005075void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type,
5076 VkFenceImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06005077 FENCE_STATE *fence_node = GetFenceState(fence);
5078 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08005079 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_FENCE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06005080 fence_node->scope == kSyncScopeInternal) {
5081 fence_node->scope = kSyncScopeExternalTemporary;
5082 } else {
5083 fence_node->scope = kSyncScopeExternalPermanent;
5084 }
5085 }
5086}
5087
// vkImportFenceFdKHR: on success, record the import's handle type and flags.
void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}
5093
Mike Schuchardt2df08912020-12-15 16:28:09 -08005094void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06005095 FENCE_STATE *fence_state = GetFenceState(fence);
5096 if (fence_state) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08005097 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06005098 // Export with reference transference becomes external
5099 fence_state->scope = kSyncScopeExternalPermanent;
5100 } else if (fence_state->scope == kSyncScopeInternal) {
5101 // Export with copy transference has a side effect of resetting the fence
5102 fence_state->state = FENCE_UNSIGNALED;
5103 }
5104 }
5105}
5106
// vkGetFenceFdKHR: on success, record the export's handle type.
void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}
5112
5113void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
5114 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
5115 if (VK_SUCCESS != result) return;
5116 eventMap[*pEvent].write_in_use = 0;
5117 eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
5118}
5119
5120void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
5121 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
5122 SWAPCHAIN_NODE *old_swapchain_state) {
5123 if (VK_SUCCESS == result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005124 auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06005125 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
5126 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
5127 swapchain_state->shared_presentable = true;
5128 }
5129 surface_state->swapchain = swapchain_state.get();
5130 swapchainMap[*pSwapchain] = std::move(swapchain_state);
5131 } else {
5132 surface_state->swapchain = nullptr;
5133 }
5134 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
5135 if (old_swapchain_state) {
5136 old_swapchain_state->retired = true;
5137 }
5138 return;
5139}
5140
5141void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
5142 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
5143 VkResult result) {
5144 auto surface_state = GetSurfaceState(pCreateInfo->surface);
5145 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
5146 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
5147}
5148
// vkDestroySwapchainKHR: tear down tracking for the swapchain and its images --
// unbind memory tracking, drop image state, unlink from the surface, and mark
// the node destroyed before erasing it.
void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        // Unlink from the surface if this swapchain is still its current one
        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}
5168
sfricke-samsung5c1b7392020-12-13 22:17:15 -08005169void ValidationStateTracker::PostCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
5170 const VkDisplayModeCreateInfoKHR *pCreateInfo,
5171 const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode,
5172 VkResult result) {
5173 if (VK_SUCCESS != result) return;
5174 if (!pMode) return;
5175 auto display_mode_state = std::make_shared<DISPLAY_MODE_STATE>(*pMode);
5176 display_mode_state->physical_device = physicalDevice;
5177 display_mode_map[*pMode] = std::move(display_mode_state);
5178}
5179
locke-lunargd556cc32019-09-17 01:21:23 -06005180void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
5181 // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
5182 for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005183 auto semaphore_state = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
5184 if (semaphore_state) {
5185 semaphore_state->signaler.first = VK_NULL_HANDLE;
5186 semaphore_state->signaled = false;
locke-lunargd556cc32019-09-17 01:21:23 -06005187 }
5188 }
5189
5190 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
5191 // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
5192 // confused itself just as much.
5193 auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
5194 if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
5195 // Mark the image as having been released to the WSI
5196 auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
5197 if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
locke-lunargb3584732019-10-28 20:18:36 -06005198 auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
locke-lunargd556cc32019-09-17 01:21:23 -06005199 auto image_state = GetImageState(image);
5200 if (image_state) {
5201 image_state->acquired = false;
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005202 if (image_state->shared_presentable) {
5203 image_state->layout_locked = true;
5204 }
locke-lunargd556cc32019-09-17 01:21:23 -06005205 }
5206 }
5207 }
5208 // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
5209 // its semaphore waits) /never/ participate in any completion proof.
5210}
5211
5212void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
5213 const VkSwapchainCreateInfoKHR *pCreateInfos,
5214 const VkAllocationCallbacks *pAllocator,
5215 VkSwapchainKHR *pSwapchains, VkResult result) {
5216 if (pCreateInfos) {
5217 for (uint32_t i = 0; i < swapchainCount; i++) {
5218 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
5219 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
5220 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
5221 }
5222 }
5223}
5224
// Shared recorder for vkAcquireNextImage(2)KHR: put the fence in flight, mark
// the semaphore signaled, and flag the acquired swapchain image.
void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto fence_state = GetFenceState(fence);
    if (fence_state && fence_state->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        fence_state->state = FENCE_INFLIGHT;
        fence_state->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && semaphore_state->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        semaphore_state->signaled = true;
        semaphore_state->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}
5254
// vkAcquireNextImageKHR: record acquire state on success (SUBOPTIMAL still
// delivers a valid image index).
void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}
5261
// vkAcquireNextImage2KHR: unpack the info struct and record acquire state on
// success (SUBOPTIMAL still delivers a valid image index).
void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}
5268
5269void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
5270 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
5271 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
5272 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
5273 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
5274 phys_device_state.phys_device = pPhysicalDevices[i];
5275 // Init actual features for each physical device
5276 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
5277 }
5278 }
5279}
5280
5281// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
5282static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005283 VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06005284 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
5285
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005286 if (pQueueFamilyProperties) { // Save queue family properties
locke-lunargd556cc32019-09-17 01:21:23 -06005287 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
5288 for (uint32_t i = 0; i < count; ++i) {
5289 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
5290 }
5291 }
5292}
5293
5294void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
5295 uint32_t *pQueueFamilyPropertyCount,
5296 VkQueueFamilyProperties *pQueueFamilyProperties) {
5297 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5298 assert(physical_device_state);
Mike Schuchardt2df08912020-12-15 16:28:09 -08005299 VkQueueFamilyProperties2 *pqfp = nullptr;
5300 std::vector<VkQueueFamilyProperties2> qfp;
locke-lunargd556cc32019-09-17 01:21:23 -06005301 qfp.resize(*pQueueFamilyPropertyCount);
5302 if (pQueueFamilyProperties) {
5303 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08005304 qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
locke-lunargd556cc32019-09-17 01:21:23 -06005305 qfp[i].pNext = nullptr;
5306 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
5307 }
5308 pqfp = qfp.data();
5309 }
5310 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
5311}
5312
// vkGetPhysicalDeviceQueueFamilyProperties2: forward directly to the common updater.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}
5320
// KHR alias of vkGetPhysicalDeviceQueueFamilyProperties2: forward to the common updater.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}
5328void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
5329 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005330 if (!surface) return;
5331 auto surface_state = GetSurfaceState(surface);
5332 surface_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005333 surface_map.erase(surface);
5334}
5335
// Shared helper for all platform surface-creation entry points: create and map
// tracking state for the new surface handle.
void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}
5339
// vkCreateDisplayPlaneSurfaceKHR: on success, record the new surface.
void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
5347
5348#ifdef VK_USE_PLATFORM_ANDROID_KHR
// vkCreateAndroidSurfaceKHR: on success, record the new surface.
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
5356#endif // VK_USE_PLATFORM_ANDROID_KHR
5357
5358#ifdef VK_USE_PLATFORM_IOS_MVK
// vkCreateIOSSurfaceMVK: on success, record the new surface.
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
5365#endif // VK_USE_PLATFORM_IOS_MVK
5366
5367#ifdef VK_USE_PLATFORM_MACOS_MVK
// vkCreateMacOSSurfaceMVK: on success, record the new surface.
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
5375#endif // VK_USE_PLATFORM_MACOS_MVK
5376
Jeremy Kniagerf33a67c2019-12-09 09:44:39 -07005377#ifdef VK_USE_PLATFORM_METAL_EXT
// vkCreateMetalSurfaceEXT: on success, record the new surface.
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
5385#endif // VK_USE_PLATFORM_METAL_EXT
5386
locke-lunargd556cc32019-09-17 01:21:23 -06005387#ifdef VK_USE_PLATFORM_WAYLAND_KHR
// vkCreateWaylandSurfaceKHR: on success, record the new surface.
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
5395#endif // VK_USE_PLATFORM_WAYLAND_KHR
5396
5397#ifdef VK_USE_PLATFORM_WIN32_KHR
// vkCreateWin32SurfaceKHR: on success, record the new surface.
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
5405#endif // VK_USE_PLATFORM_WIN32_KHR
5406
5407#ifdef VK_USE_PLATFORM_XCB_KHR
// vkCreateXcbSurfaceKHR: on success, record the new surface.
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
5414#endif // VK_USE_PLATFORM_XCB_KHR
5415
5416#ifdef VK_USE_PLATFORM_XLIB_KHR
// vkCreateXlibSurfaceKHR: on success, record the new surface.
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
5423#endif // VK_USE_PLATFORM_XLIB_KHR
5424
Niklas Haas8b84af12020-04-19 22:20:11 +02005425void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
5426 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
5427 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5428 VkResult result) {
5429 if (VK_SUCCESS != result) return;
5430 RecordVulkanSurface(pSurface);
5431}
5432
Cort23cf2282019-09-20 18:58:18 +02005433void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005434 VkPhysicalDeviceFeatures *pFeatures) {
5435 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Yilong Li358152a2020-07-08 02:16:45 -07005436 // Reset the features2 safe struct before setting up the features field.
5437 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02005438 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02005439}
5440
// vkGetPhysicalDeviceFeatures2: cache the full features2 chain (deep copy via
// the safe struct's initialize).
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->features2.initialize(pFeatures);
}
5446
5447void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005448 VkPhysicalDeviceFeatures2 *pFeatures) {
5449 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02005450 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005451}
5452
locke-lunargd556cc32019-09-17 01:21:23 -06005453void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
5454 VkSurfaceKHR surface,
5455 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
5456 VkResult result) {
5457 if (VK_SUCCESS != result) return;
5458 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005459 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005460
5461 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5462 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005463}
5464
5465void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
5466 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5467 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
5468 if (VK_SUCCESS != result) return;
5469 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005470 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005471
5472 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5473 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005474}
5475
5476void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
5477 VkSurfaceKHR surface,
5478 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
5479 VkResult result) {
5480 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005481 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
5482 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
5483 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
5484 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
5485 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
5486 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
5487 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
5488 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
5489 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
5490 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005491
5492 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5493 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005494}
5495
5496void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
5497 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
5498 VkBool32 *pSupported, VkResult result) {
5499 if (VK_SUCCESS != result) return;
5500 auto surface_state = GetSurfaceState(surface);
5501 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
5502}
5503
5504void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
5505 VkSurfaceKHR surface,
5506 uint32_t *pPresentModeCount,
5507 VkPresentModeKHR *pPresentModes,
5508 VkResult result) {
5509 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5510
5511 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
5512 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005513
5514 if (*pPresentModeCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005515 if (*pPresentModeCount > physical_device_state->present_modes.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06005516 physical_device_state->present_modes.resize(*pPresentModeCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005517 }
locke-lunargd556cc32019-09-17 01:21:23 -06005518 }
5519 if (pPresentModes) {
locke-lunargd556cc32019-09-17 01:21:23 -06005520 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
5521 physical_device_state->present_modes[i] = pPresentModes[i];
5522 }
5523 }
5524}
5525
5526void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
5527 uint32_t *pSurfaceFormatCount,
5528 VkSurfaceFormatKHR *pSurfaceFormats,
5529 VkResult result) {
5530 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5531
5532 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005533
5534 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005535 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06005536 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005537 }
locke-lunargd556cc32019-09-17 01:21:23 -06005538 }
5539 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06005540 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
5541 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
5542 }
5543 }
5544}
5545
5546void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
5547 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5548 uint32_t *pSurfaceFormatCount,
5549 VkSurfaceFormat2KHR *pSurfaceFormats,
5550 VkResult result) {
5551 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5552
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005553 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005554 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005555 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
5556 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
5557 }
locke-lunargd556cc32019-09-17 01:21:23 -06005558 }
5559 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06005560 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005561 physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
locke-lunargd556cc32019-09-17 01:21:23 -06005562 }
5563 }
5564}
5565
// Forward the debug-utils label begin to the shared logging-layer label tracker.
void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}
5570
// Forward the debug-utils label end to the shared logging-layer label tracker.
void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}
5574
5575void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
5576 const VkDebugUtilsLabelEXT *pLabelInfo) {
5577 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
5578
5579 // Squirrel away an easily accessible copy.
5580 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5581 cb_state->debug_label = LoggingLabel(pLabelInfo);
5582}
5583
5584void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005585 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06005586 if (NULL != pPhysicalDeviceGroupProperties) {
5587 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
5588 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
5589 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
5590 auto &phys_device_state = physical_device_map[cur_phys_dev];
5591 phys_device_state.phys_device = cur_phys_dev;
5592 // Init actual features for each physical device
5593 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
5594 }
5595 }
5596 }
5597}
5598
5599void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005600 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06005601 VkResult result) {
5602 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5603 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5604}
5605
5606void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005607 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06005608 VkResult result) {
5609 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5610 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5611}
5612
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005613void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
5614 uint32_t queueFamilyIndex,
5615 uint32_t *pCounterCount,
5616 VkPerformanceCounterKHR *pCounters) {
5617 if (NULL == pCounters) return;
5618
5619 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5620 assert(physical_device_state);
5621
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005622 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queue_family_counters(new QUEUE_FAMILY_PERF_COUNTERS());
5623 queue_family_counters->counters.resize(*pCounterCount);
5624 for (uint32_t i = 0; i < *pCounterCount; i++) queue_family_counters->counters[i] = pCounters[i];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005625
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005626 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queue_family_counters);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005627}
5628
5629void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
5630 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
5631 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
5632 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5633 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
5634}
5635
5636void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
5637 VkResult result) {
5638 if (result == VK_SUCCESS) performance_lock_acquired = true;
5639}
5640
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005641void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
5642 performance_lock_acquired = false;
5643 for (auto &cmd_buffer : commandBufferMap) {
5644 cmd_buffer.second->performance_lock_released = true;
5645 }
5646}
5647
locke-lunargd556cc32019-09-17 01:21:23 -06005648void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005649 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06005650 const VkAllocationCallbacks *pAllocator) {
5651 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005652 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5653 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005654 desc_template_map.erase(descriptorUpdateTemplate);
5655}
5656
5657void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005658 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06005659 const VkAllocationCallbacks *pAllocator) {
5660 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005661 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5662 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005663 desc_template_map.erase(descriptorUpdateTemplate);
5664}
5665
Mike Schuchardt2df08912020-12-15 16:28:09 -08005666void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
5667 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) {
locke-lunargd556cc32019-09-17 01:21:23 -06005668 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005669 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06005670 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
5671}
5672
Mike Schuchardt2df08912020-12-15 16:28:09 -08005673void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device,
5674 const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
5675 const VkAllocationCallbacks *pAllocator,
5676 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate,
5677 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005678 if (VK_SUCCESS != result) return;
5679 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5680}
5681
5682void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005683 VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
5684 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005685 if (VK_SUCCESS != result) return;
5686 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5687}
5688
5689void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005690 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06005691 const void *pData) {
5692 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
5693 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
5694 assert(0);
5695 } else {
5696 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
5697 // TODO: Record template push descriptor updates
5698 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
5699 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
5700 }
5701 }
5702}
5703
// Core-version entry point: delegate to the shared templated-update recorder.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5709
// KHR-extension entry point: delegate to the shared templated-update recorder.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5715
Mike Schuchardt2df08912020-12-15 16:28:09 -08005716void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
5717 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
5718 VkPipelineLayout layout, uint32_t set,
5719 const void *pData) {
locke-lunargd556cc32019-09-17 01:21:23 -06005720 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5721
5722 const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5723 if (template_state) {
5724 auto layout_data = GetPipelineLayout(layout);
5725 auto dsl = GetDslFromPipelineLayout(layout_data, set);
5726 const auto &template_ci = template_state->create_info;
Jeff Bolz6ae39612019-10-11 20:57:36 -05005727 if (dsl && !dsl->destroyed) {
locke-lunargd556cc32019-09-17 01:21:23 -06005728 // Decode the template into a set of write updates
5729 cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
5730 dsl->GetDescriptorSetLayout());
5731 RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
5732 static_cast<uint32_t>(decoded_template.desc_writes.size()),
5733 decoded_template.desc_writes.data());
5734 }
5735 }
5736}
5737
5738void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
5739 uint32_t *pPropertyCount, void *pProperties) {
5740 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5741 if (*pPropertyCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005742 physical_device_state->display_plane_property_count = *pPropertyCount;
5743 }
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005744 if (*pPropertyCount || pProperties) {
5745 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005746 }
5747}
5748
5749void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
5750 uint32_t *pPropertyCount,
5751 VkDisplayPlanePropertiesKHR *pProperties,
5752 VkResult result) {
5753 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5754 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5755}
5756
5757void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
5758 uint32_t *pPropertyCount,
5759 VkDisplayPlaneProperties2KHR *pProperties,
5760 VkResult result) {
5761 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5762 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5763}
5764
5765void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5766 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
5767 QueryObject query_obj = {queryPool, query, index};
5768 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5769 RecordCmdBeginQuery(cb_state, query_obj);
5770}
5771
5772void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5773 uint32_t query, uint32_t index) {
5774 QueryObject query_obj = {queryPool, query, index};
5775 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5776 RecordCmdEndQuery(cb_state, query_obj);
5777}
5778
5779void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
5780 VkSamplerYcbcrConversion ycbcr_conversion) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005781 auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();
5782
locke-lunargd556cc32019-09-17 01:21:23 -06005783 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005784 RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005785 }
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005786
5787 const VkFormat conversion_format = create_info->format;
5788
5789 if (conversion_format != VK_FORMAT_UNDEFINED) {
5790 // If format is VK_FORMAT_UNDEFINED, will be set by external AHB features
5791 ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
5792 }
5793
5794 ycbcr_state->chromaFilter = create_info->chromaFilter;
5795 ycbcr_state->format = conversion_format;
5796 samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005797}
5798
5799void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
5800 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5801 const VkAllocationCallbacks *pAllocator,
5802 VkSamplerYcbcrConversion *pYcbcrConversion,
5803 VkResult result) {
5804 if (VK_SUCCESS != result) return;
5805 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5806}
5807
5808void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
5809 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5810 const VkAllocationCallbacks *pAllocator,
5811 VkSamplerYcbcrConversion *pYcbcrConversion,
5812 VkResult result) {
5813 if (VK_SUCCESS != result) return;
5814 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5815}
5816
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005817void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
5818 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
5819 RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
5820 }
5821
5822 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
5823 ycbcr_state->destroyed = true;
5824 samplerYcbcrConversionMap.erase(ycbcr_conversion);
5825}
5826
locke-lunargd556cc32019-09-17 01:21:23 -06005827void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
5828 const VkAllocationCallbacks *pAllocator) {
5829 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005830 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005831}
5832
5833void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
5834 VkSamplerYcbcrConversion ycbcrConversion,
5835 const VkAllocationCallbacks *pAllocator) {
5836 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005837 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005838}
5839
// Host-side query pool reset (vkResetQueryPool / EXT alias): mark tracked queries as reset.
// Shared by the core and EXT entry points below.
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    // Clamp so a count that runs past the end of the pool does not touch out-of-range slots.
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        // Performance queries track one state entry per counter pass; reset each pass variant too.
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t pass_index = 0; pass_index < query_pool_state->n_performance_passes; pass_index++) {
                query_obj.perf_pass = pass_index;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}
5863
// EXT entry point: delegate to the shared host query-pool reset recorder.
void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5868
// Core (Vulkan 1.2) entry point: delegate to the shared host query-pool reset recorder.
void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5873
// Decode a templated update into ordinary descriptor writes and apply them to tracked state.
void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}
5881
5882// Update the common AllocateDescriptorSetsData
5883void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005884 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005885 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005886 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005887 if (layout) {
5888 ds_data->layout_nodes[i] = layout;
5889 // Count total descriptors required per type
5890 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5891 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005892 uint32_t type_index = static_cast<uint32_t>(binding_layout->descriptorType);
5893 ds_data->required_descriptors_by_type[type_index] += binding_layout->descriptorCount;
locke-lunargd556cc32019-09-17 01:21:23 -06005894 }
5895 }
5896 // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
5897 }
5898}
5899
5900// Decrement allocated sets from the pool and insert new sets into set_map
5901void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
5902 const VkDescriptorSet *descriptor_sets,
5903 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
5904 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
5905 // Account for sets and individual descriptors allocated from pool
5906 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
5907 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
5908 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
5909 }
5910
Mike Schuchardt2df08912020-12-15 16:28:09 -08005911 const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(p_alloc_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06005912 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
5913
5914 // Create tracking object for each descriptor set; insert into global map and the pool's set.
5915 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
5916 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
5917
Jeff Bolz41a1ced2019-10-11 11:40:49 -05005918 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07005919 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06005920 pool_state->sets.insert(new_ds.get());
5921 new_ds->in_use.store(0);
5922 setMap[descriptor_sets[i]] = std::move(new_ds);
5923 }
5924}
5925
// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
                                                            VkPipelineBindPoint bind_point, const char *function) {
    UpdateDrawState(cb_state, cmd_type, bind_point, function);
    // Mark that an action command has been recorded into this command buffer.
    cb_state->hasDispatchCmd = true;
}
5932
locke-lunargd556cc32019-09-17 01:21:23 -06005933// Generic function to handle state update for all CmdDraw* type functions
locke-lunarg540b2252020-08-03 13:23:36 -06005934void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
5935 const char *function) {
5936 UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005937 cb_state->hasDrawCmd = true;
5938}
5939
5940void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5941 uint32_t firstVertex, uint32_t firstInstance) {
5942 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005943 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
locke-lunargd556cc32019-09-17 01:21:23 -06005944}
5945
5946void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5947 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5948 uint32_t firstInstance) {
5949 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005950 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
locke-lunargd556cc32019-09-17 01:21:23 -06005951}
5952
5953void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5954 uint32_t count, uint32_t stride) {
5955 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5956 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005957 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005958 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5959}
5960
5961void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5962 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5963 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5964 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005965 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005966 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5967}
5968
5969void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5970 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005971 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
locke-lunargd556cc32019-09-17 01:21:23 -06005972}
5973
5974void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5975 VkDeviceSize offset) {
5976 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005977 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005978 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5979 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5980}
5981
Tony-LunarG977448c2019-12-02 14:52:02 -07005982void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5983 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
locke-lunarg540b2252020-08-03 13:23:36 -06005984 uint32_t stride, const char *function) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005985 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5986 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5987 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005988 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Tony-LunarG977448c2019-12-02 14:52:02 -07005989 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5990 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5991}
5992
locke-lunargd556cc32019-09-17 01:21:23 -06005993void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5994 VkDeviceSize offset, VkBuffer countBuffer,
5995 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5996 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005997 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5998 "vkCmdDrawIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005999}
6000
// Core (Vulkan 1.2) entry point: forwards to the shared indirect-count recorder
// with the core API name for error reporting.
void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                               "vkCmdDrawIndirectCount()");
}
6007
6008void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
6009 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
locke-lunarg540b2252020-08-03 13:23:36 -06006010 uint32_t maxDrawCount, uint32_t stride, const char *function) {
locke-lunargd556cc32019-09-17 01:21:23 -06006011 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6012 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6013 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006014 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
locke-lunargd556cc32019-09-17 01:21:23 -06006015 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6016 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
6017}
6018
// KHR-suffixed alias: forwards to the shared indexed-indirect-count recorder
// with the KHR API name for error reporting.
void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                                      "vkCmdDrawIndexedIndirectCountKHR()");
}
6026
// Core (Vulkan 1.2) entry point: forwards to the shared indexed-indirect-count
// recorder with the core API name for error reporting.
void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                      VkDeviceSize offset, VkBuffer countBuffer,
                                                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                      uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                                      "vkCmdDrawIndexedIndirectCount()");
}
6034
6035void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
6036 uint32_t firstTask) {
6037 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006038 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06006039}
6040
6041void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
6042 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
6043 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006044 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
6045 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06006046 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6047 if (buffer_state) {
6048 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6049 }
6050}
6051
6052void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
6053 VkDeviceSize offset, VkBuffer countBuffer,
6054 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
6055 uint32_t stride) {
6056 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6057 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6058 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006059 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
6060 "vkCmdDrawMeshTasksIndirectCountNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06006061 if (buffer_state) {
6062 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6063 }
6064 if (count_buffer_state) {
6065 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
6066 }
6067}
6068
6069void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
6070 const VkAllocationCallbacks *pAllocator,
6071 VkShaderModule *pShaderModule, VkResult result,
6072 void *csm_state_data) {
6073 if (VK_SUCCESS != result) return;
6074 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
6075
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06006076 spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
locke-lunargd556cc32019-09-17 01:21:23 -06006077 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05006078 auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
6079 csm_state->unique_shader_id)
6080 : std::make_shared<SHADER_MODULE_STATE>();
locke-lunargde3f0fa2020-09-10 11:55:31 -06006081 SetPushConstantUsedInShader(*new_shader_module);
locke-lunargd556cc32019-09-17 01:21:23 -06006082 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
6083}
6084
// Populate |stage_state| from one pipeline shader stage and fold this stage's
// descriptor usage into |pipeline| (active_slots, max_active_slot, and for
// fragment stages the writable-output-location list).
void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    stage_state->entry_point_name = pStage->pName;
    stage_state->shader_state = GetShared<SHADER_MODULE_STATE>(pStage->module);
    auto module = stage_state->shader_state.get();
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    stage_state->stage_flag = pStage->stage;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    // Collect which (set, binding) slots this stage touches, plus whether any
    // descriptor is written to or used atomically.
    stage_state->descriptor_uses = CollectInterfaceByDescriptorSlot(
        module, stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
    // Capture descriptor uses for the pipeline
    for (const auto &use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
        // use.first is a (set, binding) pair; OR usage flags across all stages.
        pipeline->active_slots[slot][use.first.second].is_writable |= use.second.is_writable;
        auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
        if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
        if (use.second.is_sampler_bias_offset) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET);

        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
        if (use.second.samplers_used_by_image.size()) {
            // Record, per image descriptor index, the set of samplers combined
            // with it; the mapped sampler state is filled in later (nullptr here).
            auto &samplers_used_by_image = pipeline->active_slots[slot][use.first.second].samplers_used_by_image;
            if (use.second.samplers_used_by_image.size() > samplers_used_by_image.size()) {
                samplers_used_by_image.resize(use.second.samplers_used_by_image.size());
            }
            uint32_t image_index = 0;
            for (const auto &samplers : use.second.samplers_used_by_image) {
                for (const auto &sampler : samplers) {
                    samplers_used_by_image[image_index].emplace(sampler, nullptr);
                }
                ++image_index;
            }
        }
    }

    // Fragment stage additionally reports which output locations it writes.
    if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
        pipeline->fragmentShader_writable_output_location_list = CollectWritableOutputLocationinFS(*module, *pStage);
    }
}
6136
// If |layout|'s push-constant ranges differ from what the command buffer last
// used, reset the command buffer's push-constant shadow storage and rebuild the
// per-stage byte-status vectors (PC_Byte_Not_Set outside any range,
// PC_Byte_Not_Updated inside a range that has not been written yet).
void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    // Ranges compare unequal => layouts are push-constant incompatible; drop all
    // previously recorded push-constant data.
    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        cb_state->push_constant_data_update.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            // Highest byte this range reaches; overall storage must cover it.
            auto size = push_constant_range.offset + push_constant_range.size;
            size_needed = std::max(size_needed, size);

            // Walk stageFlags bit by bit so each stage gets its own status vector.
            auto stage_flags = push_constant_range.stageFlags;
            uint32_t bit_shift = 0;
            while (stage_flags) {
                if (stage_flags & 1) {
                    VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
                    const auto it = cb_state->push_constant_data_update.find(flag);

                    if (it != cb_state->push_constant_data_update.end()) {
                        // Grow the existing vector: bytes before this range's
                        // offset default to "not set", bytes inside it to
                        // "not updated". resize() never shrinks here.
                        if (it->second.size() < push_constant_range.offset) {
                            it->second.resize(push_constant_range.offset, PC_Byte_Not_Set);
                        }
                        if (it->second.size() < size) {
                            it->second.resize(size, PC_Byte_Not_Updated);
                        }
                    } else {
                        // First range seen for this stage: build its vector.
                        std::vector<uint8_t> bytes;
                        bytes.resize(push_constant_range.offset, PC_Byte_Not_Set);
                        bytes.resize(size, PC_Byte_Not_Updated);
                        cb_state->push_constant_data_update[flag] = bytes;
                    }
                }
                stage_flags = stage_flags >> 1;
                ++bit_shift;
            }
        }
        // Zero-filled shadow copy of the push-constant block itself.
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}
John Zulauf22b0fbe2019-10-15 06:26:16 -06006184
// Track swapchain images returned by vkGetSwapchainImagesKHR. For each image not
// seen before, synthesizes a VkImageCreateInfo from the swapchain's create info
// (the app never creates these images directly) and registers IMAGE_STATE for it.
void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    // VK_INCOMPLETE still returns valid images, so track those too.
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    // pSwapchainImages is null on the count-query call; nothing to record then.
    if (pSwapchainImages) {
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;  // to be set later
            image_ci.flags = 0;        // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            // Propagate the format list (if any) from the swapchain's pNext chain.
            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfo>(swapchain_state->createInfo.pNext);

            // Translate swapchain create flags into the equivalent image create flags.
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR) {
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            }
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR) {
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            }
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT);
            }

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            // Contents are undefined until something renders/presents into it.
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);

            AddImageStateProps(*image_state, device, physical_device);
        }
    }

    // Remember how many images the app has actually retrieved.
    if (*pSwapchainImageCount) {
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}
sourav parmar35e7a002020-06-09 17:58:44 -07006247
sourav parmar35e7a002020-06-09 17:58:44 -07006248void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
6249 const VkCopyAccelerationStructureInfoKHR *pInfo) {
6250 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6251 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07006252 ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfo->src);
6253 ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfo->dst);
sourav parmar35e7a002020-06-09 17:58:44 -07006254 if (dst_as_state != nullptr && src_as_state != nullptr) {
6255 dst_as_state->built = true;
6256 dst_as_state->build_info_khr = src_as_state->build_info_khr;
6257 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
6258 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
6259 }
6260 }
6261}
Piers Daniell39842ee2020-07-10 16:42:33 -06006262
6263void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
6264 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6265 cb_state->status |= CBSTATUS_CULL_MODE_SET;
6266 cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
6267}
6268
6269void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
6270 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6271 cb_state->status |= CBSTATUS_FRONT_FACE_SET;
6272 cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
6273}
6274
6275void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
6276 VkPrimitiveTopology primitiveTopology) {
6277 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6278 cb_state->primitiveTopology = primitiveTopology;
6279 cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
6280 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
6281}
6282
6283void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
6284 const VkViewport *pViewports) {
6285 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6286 cb_state->viewportWithCountMask |= (1u << viewportCount) - 1u;
Tobias Hector6663c9b2020-11-05 10:18:02 +00006287 cb_state->viewportWithCountCount = viewportCount;
Piers Daniell39842ee2020-07-10 16:42:33 -06006288 cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
6289 cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
6290}
6291
6292void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
6293 const VkRect2D *pScissors) {
6294 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6295 cb_state->scissorWithCountMask |= (1u << scissorCount) - 1u;
6296 cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
6297 cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
6298}
6299
// Record state for vkCmdBindVertexBuffers2EXT: updates the per-binding vertex
// buffer info (buffer, offset, size, stride) starting at |firstBinding| and, when
// strides are supplied, marks the dynamic stride state as set.
void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                                   uint32_t bindingCount, const VkBuffer *pBuffers,
                                                                   const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
                                                                   const VkDeviceSize *pStrides) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // pStrides is optional; only then does this call set dynamic stride state.
    if (pStrides) {
        cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
        cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
    }

    // Grow the binding array so indices [firstBinding, firstBinding+bindingCount) exist.
    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
        vertex_buffer_binding.offset = pOffsets[i];
        // pSizes/pStrides are optional arrays; fall back to VK_WHOLE_SIZE / 0.
        vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
        vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
        // Add binding for this vertex buffer to this commandbuffer
        if (pBuffers[i]) {
            AddCommandBufferBindingBuffer(cb_state, vertex_buffer_binding.buffer_state.get());
        }
    }
}
6327
6328void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
6329 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6330 cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
6331 cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
6332}
6333
6334void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
6335 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6336 cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
6337 cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
6338}
6339
6340void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
6341 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6342 cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
6343 cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
6344}
6345
6346void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
6347 VkBool32 depthBoundsTestEnable) {
6348 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6349 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
6350 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
6351}
6352void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
6353 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6354 cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
6355 cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
6356}
6357
6358void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
6359 VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
6360 VkCompareOp compareOp) {
6361 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6362 cb_state->status |= CBSTATUS_STENCIL_OP_SET;
6363 cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
6364}
locke-lunarg4189aa22020-10-21 00:23:48 -06006365
6366void ValidationStateTracker::PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
6367 uint32_t discardRectangleCount,
6368 const VkRect2D *pDiscardRectangles) {
6369 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6370 cb_state->status |= CBSTATUS_DISCARD_RECTANGLE_SET;
6371 cb_state->static_status &= ~CBSTATUS_DISCARD_RECTANGLE_SET;
6372}
6373
6374void ValidationStateTracker::PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
6375 const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
6376 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6377 cb_state->status |= CBSTATUS_SAMPLE_LOCATIONS_SET;
6378 cb_state->static_status &= ~CBSTATUS_SAMPLE_LOCATIONS_SET;
6379}
6380
6381void ValidationStateTracker::PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer,
6382 VkCoarseSampleOrderTypeNV sampleOrderType,
6383 uint32_t customSampleOrderCount,
6384 const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) {
6385 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6386 cb_state->status |= CBSTATUS_COARSE_SAMPLE_ORDER_SET;
6387 cb_state->static_status &= ~CBSTATUS_COARSE_SAMPLE_ORDER_SET;
6388}