blob: d6a64cdafddceb987b322d63a6f5f8bab5990204 [file] [log] [blame]
sfricke-samsung486a51e2021-01-02 00:10:15 -08001/* Copyright (c) 2015-2021 The Khronos Group Inc.
2 * Copyright (c) 2015-2021 Valve Corporation
3 * Copyright (c) 2015-2021 LunarG, Inc.
4 * Copyright (C) 2015-2021 Google Inc.
Tobias Hector6663c9b2020-11-05 10:18:02 +00005 * Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
locke-lunargd556cc32019-09-17 01:21:23 -06006 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 * Author: Mark Lobodzinski <mark@lunarg.com>
20 * Author: Dave Houlton <daveh@lunarg.com>
21 * Shannon McPherson <shannon@lunarg.com>
Tobias Hector6663c9b2020-11-05 10:18:02 +000022 * Author: Tobias Hector <tobias.hector@amd.com>
locke-lunargd556cc32019-09-17 01:21:23 -060023 */
24
locke-lunargd556cc32019-09-17 01:21:23 -060025#include <cmath>
locke-lunargd556cc32019-09-17 01:21:23 -060026
27#include "vk_enum_string_helper.h"
28#include "vk_format_utils.h"
29#include "vk_layer_data.h"
30#include "vk_layer_utils.h"
31#include "vk_layer_logging.h"
32#include "vk_typemap_helper.h"
33
34#include "chassis.h"
35#include "state_tracker.h"
36#include "shader_validation.h"
Jeremy Gebben74aa7622020-12-15 11:18:00 -070037#include "sync_utils.h"
locke-lunargd556cc32019-09-17 01:21:23 -060038
John Zulauf890b50b2020-06-17 15:18:19 -060039const char *CommandTypeString(CMD_TYPE type) {
40 // Autogenerated as part of the vk_validation_error_message.h codegen
Mark Lobodzinski677dc0b2020-11-12 15:28:09 -070041 return kGeneratedCommandNameList[type];
John Zulauf890b50b2020-06-17 15:18:19 -060042}
43
locke-lunarg4189aa22020-10-21 00:23:48 -060044VkDynamicState ConvertToDynamicState(CBStatusFlagBits flag) {
45 switch (flag) {
46 case CBSTATUS_LINE_WIDTH_SET:
47 return VK_DYNAMIC_STATE_LINE_WIDTH;
48 case CBSTATUS_DEPTH_BIAS_SET:
49 return VK_DYNAMIC_STATE_DEPTH_BIAS;
50 case CBSTATUS_BLEND_CONSTANTS_SET:
51 return VK_DYNAMIC_STATE_BLEND_CONSTANTS;
52 case CBSTATUS_DEPTH_BOUNDS_SET:
53 return VK_DYNAMIC_STATE_DEPTH_BOUNDS;
54 case CBSTATUS_STENCIL_READ_MASK_SET:
55 return VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK;
56 case CBSTATUS_STENCIL_WRITE_MASK_SET:
57 return VK_DYNAMIC_STATE_STENCIL_WRITE_MASK;
58 case CBSTATUS_STENCIL_REFERENCE_SET:
59 return VK_DYNAMIC_STATE_STENCIL_REFERENCE;
60 case CBSTATUS_VIEWPORT_SET:
61 return VK_DYNAMIC_STATE_VIEWPORT;
62 case CBSTATUS_SCISSOR_SET:
63 return VK_DYNAMIC_STATE_SCISSOR;
64 case CBSTATUS_EXCLUSIVE_SCISSOR_SET:
65 return VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV;
66 case CBSTATUS_SHADING_RATE_PALETTE_SET:
67 return VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV;
68 case CBSTATUS_LINE_STIPPLE_SET:
69 return VK_DYNAMIC_STATE_LINE_STIPPLE_EXT;
70 case CBSTATUS_VIEWPORT_W_SCALING_SET:
71 return VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV;
72 case CBSTATUS_CULL_MODE_SET:
73 return VK_DYNAMIC_STATE_CULL_MODE_EXT;
74 case CBSTATUS_FRONT_FACE_SET:
75 return VK_DYNAMIC_STATE_FRONT_FACE_EXT;
76 case CBSTATUS_PRIMITIVE_TOPOLOGY_SET:
77 return VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT;
78 case CBSTATUS_VIEWPORT_WITH_COUNT_SET:
79 return VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT;
80 case CBSTATUS_SCISSOR_WITH_COUNT_SET:
81 return VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT;
82 case CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET:
83 return VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT;
84 case CBSTATUS_DEPTH_TEST_ENABLE_SET:
85 return VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT;
86 case CBSTATUS_DEPTH_WRITE_ENABLE_SET:
87 return VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT;
88 case CBSTATUS_DEPTH_COMPARE_OP_SET:
89 return VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT;
90 case CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET:
91 return VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT;
92 case CBSTATUS_STENCIL_TEST_ENABLE_SET:
93 return VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT;
94 case CBSTATUS_STENCIL_OP_SET:
95 return VK_DYNAMIC_STATE_STENCIL_OP_EXT;
96 case CBSTATUS_DISCARD_RECTANGLE_SET:
97 return VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT;
98 case CBSTATUS_SAMPLE_LOCATIONS_SET:
99 return VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT;
100 case CBSTATUS_COARSE_SAMPLE_ORDER_SET:
101 return VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV;
102 default:
103 // CBSTATUS_INDEX_BUFFER_BOUND is not in VkDynamicState
104 return VK_DYNAMIC_STATE_MAX_ENUM;
105 }
106 return VK_DYNAMIC_STATE_MAX_ENUM;
107}
108
109CBStatusFlagBits ConvertToCBStatusFlagBits(VkDynamicState state) {
110 switch (state) {
111 case VK_DYNAMIC_STATE_VIEWPORT:
112 return CBSTATUS_VIEWPORT_SET;
113 case VK_DYNAMIC_STATE_SCISSOR:
114 return CBSTATUS_SCISSOR_SET;
115 case VK_DYNAMIC_STATE_LINE_WIDTH:
116 return CBSTATUS_LINE_WIDTH_SET;
117 case VK_DYNAMIC_STATE_DEPTH_BIAS:
118 return CBSTATUS_DEPTH_BIAS_SET;
119 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
120 return CBSTATUS_BLEND_CONSTANTS_SET;
121 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
122 return CBSTATUS_DEPTH_BOUNDS_SET;
123 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
124 return CBSTATUS_STENCIL_READ_MASK_SET;
125 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
126 return CBSTATUS_STENCIL_WRITE_MASK_SET;
127 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
128 return CBSTATUS_STENCIL_REFERENCE_SET;
129 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
130 return CBSTATUS_VIEWPORT_W_SCALING_SET;
131 case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT:
132 return CBSTATUS_DISCARD_RECTANGLE_SET;
133 case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT:
134 return CBSTATUS_SAMPLE_LOCATIONS_SET;
135 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
136 return CBSTATUS_SHADING_RATE_PALETTE_SET;
137 case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV:
138 return CBSTATUS_COARSE_SAMPLE_ORDER_SET;
139 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
140 return CBSTATUS_EXCLUSIVE_SCISSOR_SET;
141 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
142 return CBSTATUS_LINE_STIPPLE_SET;
143 case VK_DYNAMIC_STATE_CULL_MODE_EXT:
144 return CBSTATUS_CULL_MODE_SET;
145 case VK_DYNAMIC_STATE_FRONT_FACE_EXT:
146 return CBSTATUS_FRONT_FACE_SET;
147 case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT:
148 return CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
149 case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
150 return CBSTATUS_VIEWPORT_WITH_COUNT_SET;
151 case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT:
152 return CBSTATUS_SCISSOR_WITH_COUNT_SET;
153 case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT:
154 return CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
155 case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT:
156 return CBSTATUS_DEPTH_TEST_ENABLE_SET;
157 case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT:
158 return CBSTATUS_DEPTH_WRITE_ENABLE_SET;
159 case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT:
160 return CBSTATUS_DEPTH_COMPARE_OP_SET;
161 case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT:
162 return CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
163 case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT:
164 return CBSTATUS_STENCIL_TEST_ENABLE_SET;
165 case VK_DYNAMIC_STATE_STENCIL_OP_EXT:
166 return CBSTATUS_STENCIL_OP_SET;
167 default:
168 return CBSTATUS_NONE;
169 }
170 return CBSTATUS_NONE;
171}
172
// Hook up this device-level tracker to its instance-level counterpart.
// instance_state is resolved from the instance's object_dispatch list via container_type,
// then the base-class initialization runs. When add_obj is false this is a no-op
// (presumably the object is registered elsewhere — confirm against chassis.h).
void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}
180
John Zulauf5c5e88d2019-12-26 11:22:02 -0700181uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
182 // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
183 uint32_t mip_level_count = range->levelCount;
184 if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
185 mip_level_count = mip_levels - range->baseMipLevel;
186 }
187 return mip_level_count;
188}
189
190uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
191 // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
192 uint32_t array_layer_count = range->layerCount;
193 if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
194 array_layer_count = layers - range->baseArrayLayer;
195 }
196 return array_layer_count;
197}
198
199VkImageSubresourceRange NormalizeSubresourceRange(const VkImageCreateInfo &image_create_info,
200 const VkImageSubresourceRange &range) {
201 VkImageSubresourceRange norm = range;
202 norm.levelCount = ResolveRemainingLevels(&range, image_create_info.mipLevels);
203
Mike Schuchardt2df08912020-12-15 16:28:09 -0800204 // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT flag bit, where <extent.depth> and
John Zulauf5c5e88d2019-12-26 11:22:02 -0700205 // <arrayLayers> can potentially alias.
Mike Schuchardt2df08912020-12-15 16:28:09 -0800206 uint32_t layer_limit = (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT))
John Zulauf5c5e88d2019-12-26 11:22:02 -0700207 ? image_create_info.extent.depth
208 : image_create_info.arrayLayers;
209 norm.layerCount = ResolveRemainingLayers(&range, layer_limit);
210
211 // For multiplanar formats, IMAGE_ASPECT_COLOR is equivalent to adding the aspect of the individual planes
212 VkImageAspectFlags &aspect_mask = norm.aspectMask;
213 if (FormatIsMultiplane(image_create_info.format)) {
214 if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
215 aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
216 aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
217 if (FormatPlaneCount(image_create_info.format) > 2) {
218 aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
219 }
220 }
221 }
222 return norm;
223}
224
225VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range) {
226 const VkImageCreateInfo &image_create_info = image_state.createInfo;
227 return NormalizeSubresourceRange(image_create_info, range);
228}
229
John Zulauf2bc1fde2020-04-24 15:09:51 -0600230// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
231// attachments won't persist past the API entry point exit.
232std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
233 const FRAMEBUFFER_STATE &fb_state) {
234 const VkImageView *attachments = fb_state.createInfo.pAttachments;
235 uint32_t count = fb_state.createInfo.attachmentCount;
236 if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700237 const auto *framebuffer_attachments = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
John Zulauf2bc1fde2020-04-24 15:09:51 -0600238 if (framebuffer_attachments) {
239 attachments = framebuffer_attachments->pAttachments;
240 count = framebuffer_attachments->attachmentCount;
241 }
242 }
243 return std::make_pair(count, attachments);
244}
245
John Zulauf64ffe552021-02-06 10:25:07 -0700246template <typename ImageViewPointer, typename Get>
247std::vector<ImageViewPointer> GetAttachmentViewsImpl(const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state,
248 const Get &get_fn) {
249 std::vector<ImageViewPointer> views;
John Zulauf2bc1fde2020-04-24 15:09:51 -0600250
251 const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
252 const auto attachment_count = count_attachment.first;
253 const auto *attachments = count_attachment.second;
254 views.resize(attachment_count, nullptr);
255 for (uint32_t i = 0; i < attachment_count; i++) {
256 if (attachments[i] != VK_NULL_HANDLE) {
John Zulauf64ffe552021-02-06 10:25:07 -0700257 views[i] = get_fn(attachments[i]);
John Zulauf2bc1fde2020-04-24 15:09:51 -0600258 }
259 }
260 return views;
261}
262
John Zulauf64ffe552021-02-06 10:25:07 -0700263std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetAttachmentViews(const VkRenderPassBeginInfo &rp_begin,
264 const FRAMEBUFFER_STATE &fb_state) const {
265 auto get_fn = [this](VkImageView handle) { return this->Get<IMAGE_VIEW_STATE>(handle); };
266 return GetAttachmentViewsImpl<const IMAGE_VIEW_STATE *>(rp_begin, fb_state, get_fn);
267}
268
269std::vector<std::shared_ptr<const IMAGE_VIEW_STATE>> ValidationStateTracker::GetSharedAttachmentViews(
270 const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state) const {
271 auto get_fn = [this](VkImageView handle) { return this->GetShared<IMAGE_VIEW_STATE>(handle); };
272 return GetAttachmentViewsImpl<std::shared_ptr<const IMAGE_VIEW_STATE>>(rp_begin, fb_state, get_fn);
273}
274
John Zulauf2bc1fde2020-04-24 15:09:51 -0600275std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetCurrentAttachmentViews(const CMD_BUFFER_STATE &cb_state) const {
276 // Only valid *after* RecordBeginRenderPass and *before* RecordEndRenderpass as it relies on cb_state for the renderpass info.
277 std::vector<const IMAGE_VIEW_STATE *> views;
278
locke-lunargaecf2152020-05-12 17:15:41 -0600279 const auto *rp_state = cb_state.activeRenderPass.get();
John Zulauf2bc1fde2020-04-24 15:09:51 -0600280 if (!rp_state) return views;
281 const auto &rp_begin = *cb_state.activeRenderPassBeginInfo.ptr();
282 const auto *fb_state = Get<FRAMEBUFFER_STATE>(rp_begin.framebuffer);
283 if (!fb_state) return views;
284
285 return GetAttachmentViews(rp_begin, *fb_state);
286}
287
locke-lunarg3e127c72020-06-09 17:45:28 -0600288PIPELINE_STATE *GetCurrentPipelineFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint) {
locke-lunargb8d7a7a2020-10-25 16:01:52 -0600289 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
290 return cmd.lastBound[lv_bind_point].pipeline_state;
locke-lunarg3e127c72020-06-09 17:45:28 -0600291}
292
293void GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint,
294 const PIPELINE_STATE **rtn_pipe,
295 const std::vector<LAST_BOUND_STATE::PER_SET> **rtn_sets) {
locke-lunargb8d7a7a2020-10-25 16:01:52 -0600296 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
297 const auto &last_bound_it = cmd.lastBound[lv_bind_point];
298 if (!last_bound_it.IsUsing()) {
locke-lunarg3e127c72020-06-09 17:45:28 -0600299 return;
300 }
locke-lunargb8d7a7a2020-10-25 16:01:52 -0600301 *rtn_pipe = last_bound_it.pipeline_state;
302 *rtn_sets = &(last_bound_it.per_set);
locke-lunarg3e127c72020-06-09 17:45:28 -0600303}
304
locke-lunargd556cc32019-09-17 01:21:23 -0600305#ifdef VK_USE_PLATFORM_ANDROID_KHR
306// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
307// This could also move into a seperate core_validation_android.cpp file... ?
308
309void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700310 const VkExternalMemoryImageCreateInfo *emici = LvlFindInChain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600311 if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
Spencer Fricke224c9852020-04-06 07:47:29 -0700312 is_node->external_ahb = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600313 }
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700314 const VkExternalFormatANDROID *ext_fmt_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600315 if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
316 is_node->has_ahb_format = true;
317 is_node->ahb_format = ext_fmt_android->externalFormat;
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700318 // VUID 01894 will catch if not found in map
319 auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
320 if (it != ahb_ext_formats_map.end()) {
321 is_node->format_features = it->second;
322 }
locke-lunargd556cc32019-09-17 01:21:23 -0600323 }
324}
325
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700326void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700327 const VkExternalMemoryBufferCreateInfo *embci = LvlFindInChain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700328 if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
329 bs_node->external_ahb = true;
330 }
331}
332
locke-lunargd556cc32019-09-17 01:21:23 -0600333void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700334 VkSamplerYcbcrConversion ycbcr_conversion,
335 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700336 const VkExternalFormatANDROID *ext_format_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600337 if (ext_format_android && (0 != ext_format_android->externalFormat)) {
338 ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700339 // VUID 01894 will catch if not found in map
340 auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
341 if (it != ahb_ext_formats_map.end()) {
342 ycbcr_state->format_features = it->second;
343 }
locke-lunargd556cc32019-09-17 01:21:23 -0600344 }
345};
346
347void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
348 ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
349};
350
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700351void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
352 VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
353 if (VK_SUCCESS != result) return;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700354 auto ahb_format_props = LvlFindInChain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700355 if (ahb_format_props) {
Jeremy Gebbenfc6f8152021-03-18 16:58:55 -0600356 ahb_ext_formats_map.emplace(ahb_format_props->externalFormat, ahb_format_props->formatFeatures);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700357 }
358}
359
locke-lunargd556cc32019-09-17 01:21:23 -0600360#else
361
// Non-Android build: AHardwareBuffer image tracking is a no-op.
void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
363
// Non-Android build: AHardwareBuffer buffer tracking is a no-op.
void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}
365
// Non-Android build: AHardwareBuffer Ycbcr-conversion tracking is a no-op.
void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state){};
locke-lunargd556cc32019-09-17 01:21:23 -0600369
// Non-Android build: nothing to clean up for Ycbcr conversions.
void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
371
372#endif // VK_USE_PLATFORM_ANDROID_KHR
373
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -0600374std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
375 uint32_t set) {
376 std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
377 if (layout_data && (set < layout_data->set_layouts.size())) {
378 dsl = layout_data->set_layouts[set];
379 }
380 return dsl;
381}
382
Petr Kraus44f1c482020-04-25 20:09:25 +0200383void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
384 // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
385 // if format is AHB external format then the features are already set
386 if (image_state.has_ahb_format == false) {
387 const VkImageTiling image_tiling = image_state.createInfo.tiling;
388 const VkFormat image_format = image_state.createInfo.format;
389 if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
390 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
391 VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
392 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image, &drm_format_properties);
393
394 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
395 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
396 nullptr};
397 format_properties_2.pNext = (void *)&drm_properties_list;
398 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Lionel Landwerlin09351a72020-06-22 18:15:59 +0300399 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
400 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
401 drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
402 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Petr Kraus44f1c482020-04-25 20:09:25 +0200403
404 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300405 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
406 drm_format_properties.drmFormatModifier) {
407 image_state.format_features =
Petr Kraus44f1c482020-04-25 20:09:25 +0200408 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300409 break;
Petr Kraus44f1c482020-04-25 20:09:25 +0200410 }
411 }
412 } else {
413 VkFormatProperties format_properties;
414 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
415 image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
416 : format_properties.optimalTilingFeatures;
417 }
418 }
419}
420
// Create and register tracking state for a successfully created image:
// AHB/swapchain linkage, memory requirements (plain or per-plane for disjoint
// multiplanar images), format features, and protected-memory status.
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    // Images created from a swapchain remember which swapchain they belong to.
    const auto swapchain_info = LvlFindInChain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            // Disjoint multiplanar images have independent requirements per plane;
            // query each via VkImagePlaneMemoryRequirementsInfo.
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least first plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    AddImageStateProps(*is_node, device, physical_device);

    // VK_IMAGE_CREATE_PROTECTED_BIT absent -> image is unprotected.
    is_node->unprotected = ((pCreateInfo->flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);
    imageMap.emplace(*pImage, std::move(is_node));
}
470
// Tear down all state-tracker bookkeeping for an image before it is destroyed.
// Ordering matters: command buffers are invalidated and memory ranges removed
// while image_state is still fully intact, and the map entry is erased last.
void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    // Any command buffer that recorded a reference to this image becomes invalid.
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto *mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image_state, mem_binding);
    }
    // Images bound to a swapchain are also unlinked from their swapchain slot.
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    // Mark the state object destroyed before dropping the map reference.
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}
492
493void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
494 VkImageLayout imageLayout, const VkClearColorValue *pColor,
495 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
496 auto cb_node = GetCBState(commandBuffer);
497 auto image_state = GetImageState(image);
498 if (cb_node && image_state) {
499 AddCommandBufferBindingImage(cb_node, image_state);
500 }
501}
502
503void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
504 VkImageLayout imageLayout,
505 const VkClearDepthStencilValue *pDepthStencil,
506 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
507 auto cb_node = GetCBState(commandBuffer);
508 auto image_state = GetImageState(image);
509 if (cb_node && image_state) {
510 AddCommandBufferBindingImage(cb_node, image_state);
511 }
512}
513
514void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
515 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
516 uint32_t regionCount, const VkImageCopy *pRegions) {
517 auto cb_node = GetCBState(commandBuffer);
518 auto src_image_state = GetImageState(srcImage);
519 auto dst_image_state = GetImageState(dstImage);
520
521 // Update bindings between images and cmd buffer
522 AddCommandBufferBindingImage(cb_node, src_image_state);
523 AddCommandBufferBindingImage(cb_node, dst_image_state);
524}
525
Jeff Leger178b1e52020-10-05 12:22:23 -0400526void ValidationStateTracker::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
527 const VkCopyImageInfo2KHR *pCopyImageInfo) {
528 auto cb_node = GetCBState(commandBuffer);
529 auto src_image_state = GetImageState(pCopyImageInfo->srcImage);
530 auto dst_image_state = GetImageState(pCopyImageInfo->dstImage);
531
532 // Update bindings between images and cmd buffer
533 AddCommandBufferBindingImage(cb_node, src_image_state);
534 AddCommandBufferBindingImage(cb_node, dst_image_state);
535}
536
locke-lunargd556cc32019-09-17 01:21:23 -0600537void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
538 VkImageLayout srcImageLayout, VkImage dstImage,
539 VkImageLayout dstImageLayout, uint32_t regionCount,
540 const VkImageResolve *pRegions) {
541 auto cb_node = GetCBState(commandBuffer);
542 auto src_image_state = GetImageState(srcImage);
543 auto dst_image_state = GetImageState(dstImage);
544
545 // Update bindings between images and cmd buffer
546 AddCommandBufferBindingImage(cb_node, src_image_state);
547 AddCommandBufferBindingImage(cb_node, dst_image_state);
548}
549
Jeff Leger178b1e52020-10-05 12:22:23 -0400550void ValidationStateTracker::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
551 const VkResolveImageInfo2KHR *pResolveImageInfo) {
552 auto cb_node = GetCBState(commandBuffer);
553 auto src_image_state = GetImageState(pResolveImageInfo->srcImage);
554 auto dst_image_state = GetImageState(pResolveImageInfo->dstImage);
555
556 // Update bindings between images and cmd buffer
557 AddCommandBufferBindingImage(cb_node, src_image_state);
558 AddCommandBufferBindingImage(cb_node, dst_image_state);
559}
560
locke-lunargd556cc32019-09-17 01:21:23 -0600561void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
562 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
563 uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
564 auto cb_node = GetCBState(commandBuffer);
565 auto src_image_state = GetImageState(srcImage);
566 auto dst_image_state = GetImageState(dstImage);
567
568 // Update bindings between images and cmd buffer
569 AddCommandBufferBindingImage(cb_node, src_image_state);
570 AddCommandBufferBindingImage(cb_node, dst_image_state);
571}
572
Jeff Leger178b1e52020-10-05 12:22:23 -0400573void ValidationStateTracker::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
574 const VkBlitImageInfo2KHR *pBlitImageInfo) {
575 auto cb_node = GetCBState(commandBuffer);
576 auto src_image_state = GetImageState(pBlitImageInfo->srcImage);
577 auto dst_image_state = GetImageState(pBlitImageInfo->dstImage);
578
579 // Update bindings between images and cmd buffer
580 AddCommandBufferBindingImage(cb_node, src_image_state);
581 AddCommandBufferBindingImage(cb_node, dst_image_state);
582}
583
// Create and register tracking state for a successfully created buffer:
// AHB linkage, memory requirements, and protected-memory status.
void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
    }
    // Get a set of requirements in the case the app does not
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    // VK_BUFFER_CREATE_PROTECTED_BIT absent -> buffer is unprotected.
    buffer_state->unprotected = ((pCreateInfo->flags & VK_BUFFER_CREATE_PROTECTED_BIT) == 0);

    bufferMap.emplace(*pBuffer, std::move(buffer_state));
}
601
602void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
603 const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
604 VkResult result) {
605 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500606 auto buffer_state = GetBufferShared(pCreateInfo->buffer);
locke-lunarg25b6c352020-08-06 17:44:18 -0600607 auto buffer_view_state = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
608
609 VkFormatProperties format_properties;
610 DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);
611 buffer_view_state->format_features = format_properties.bufferFeatures;
612
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700613 bufferViewMap.emplace(*pView, std::move(buffer_view_state));
locke-lunargd556cc32019-09-17 01:21:23 -0600614}
615
// Build and register an IMAGE_VIEW_STATE entry for a successfully created VkImageView.
// Caches the view's format features (per vkspec.html#resources-image-view-format-features),
// its effective (possibly inherited) usage, and — when VK_EXT_filter_cubic is enabled —
// its cubic-filter properties, so draw-time validation can avoid repeated queries.
void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);

    // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
    const VkImageTiling image_tiling = image_state->createInfo.tiling;
    const VkFormat image_view_format = pCreateInfo->format;
    if (image_state->has_ahb_format == true) {
        // The ImageView uses same Image's format feature since they share same AHB
        image_view_state->format_features = image_state->format_features;
    } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        // DRM tiling: features depend on the image's actual modifier, which must be queried
        // from the image itself and then matched against the per-modifier format properties.
        // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
        assert(device_extensions.vk_ext_image_drm_format_modifier);
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                       nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;

        // First call is to get the number of modifiers compatible with the queried format
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
        drm_properties.resize(drm_properties_list.drmFormatModifierCount);
        drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();

        // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
        // compatible with the queried format
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        // Accumulate the tiling features of the modifier actually used by the image
        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
                image_view_state->format_features |=
                    drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                break;
            }
        }
    } else {
        // Linear/optimal tiling: features come straight from the format properties
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
        image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                     : format_properties.optimalTilingFeatures;
    }

    // The view's effective usage is the VkImageViewUsageCreateInfo override if present,
    // otherwise it is inherited from the image's create-time usage
    auto usage_create_info = LvlFindInChain<VkImageViewUsageCreateInfo>(pCreateInfo->pNext);
    image_view_state->inherited_usage = (usage_create_info) ? usage_create_info->usage : image_state->createInfo.usage;

    // filter_cubic_props is used in CmdDraw validation. But it takes a lot of performance if it does in CmdDraw.
    image_view_state->filter_cubic_props = LvlInitStruct<VkFilterCubicImageViewImageFormatPropertiesEXT>();
    if (IsExtEnabled(device_extensions.vk_ext_filter_cubic)) {
        auto imageview_format_info = LvlInitStruct<VkPhysicalDeviceImageViewImageFormatInfoEXT>();
        imageview_format_info.imageViewType = pCreateInfo->viewType;
        auto image_format_info = LvlInitStruct<VkPhysicalDeviceImageFormatInfo2>(&imageview_format_info);
        image_format_info.type = image_state->createInfo.imageType;
        image_format_info.format = image_state->createInfo.format;
        image_format_info.tiling = image_state->createInfo.tiling;
        image_format_info.usage = image_view_state->inherited_usage;
        image_format_info.flags = image_state->createInfo.flags;

        // filter_cubic_props is chained as the output struct of this query
        auto image_format_properties = LvlInitStruct<VkImageFormatProperties2>(&image_view_state->filter_cubic_props);

        DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &image_format_info, &image_format_properties);
    }
    imageViewMap.emplace(*pView, std::move(image_view_state));
}
687
688void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
689 uint32_t regionCount, const VkBufferCopy *pRegions) {
690 auto cb_node = GetCBState(commandBuffer);
691 auto src_buffer_state = GetBufferState(srcBuffer);
692 auto dst_buffer_state = GetBufferState(dstBuffer);
693
694 // Update bindings between buffers and cmd buffer
695 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
696 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
697}
698
Jeff Leger178b1e52020-10-05 12:22:23 -0400699void ValidationStateTracker::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
700 const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
701 auto cb_node = GetCBState(commandBuffer);
702 auto src_buffer_state = GetBufferState(pCopyBufferInfos->srcBuffer);
703 auto dst_buffer_state = GetBufferState(pCopyBufferInfos->dstBuffer);
704
705 // Update bindings between buffers and cmd buffer
706 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
707 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
708}
709
locke-lunargd556cc32019-09-17 01:21:23 -0600710void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
711 const VkAllocationCallbacks *pAllocator) {
712 IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
713 if (!image_view_state) return;
714 const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
715
716 // Any bound cmd buffers are now invalid
717 InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500718 image_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600719 imageViewMap.erase(imageView);
720}
721
722void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
723 if (!buffer) return;
724 auto buffer_state = GetBufferState(buffer);
725 const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
726
727 InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
locke-lunargd556cc32019-09-17 01:21:23 -0600728 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500729 buffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600730 bufferMap.erase(buffer_state->buffer);
731}
732
733void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
734 const VkAllocationCallbacks *pAllocator) {
735 if (!bufferView) return;
736 auto buffer_view_state = GetBufferViewState(bufferView);
737 const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
738
739 // Any bound cmd buffers are now invalid
740 InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500741 buffer_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600742 bufferViewMap.erase(bufferView);
743}
744
745void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
746 VkDeviceSize size, uint32_t data) {
747 auto cb_node = GetCBState(commandBuffer);
748 auto buffer_state = GetBufferState(dstBuffer);
749 // Update bindings between buffer and cmd buffer
750 AddCommandBufferBindingBuffer(cb_node, buffer_state);
751}
752
753void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
754 VkImageLayout srcImageLayout, VkBuffer dstBuffer,
755 uint32_t regionCount, const VkBufferImageCopy *pRegions) {
756 auto cb_node = GetCBState(commandBuffer);
757 auto src_image_state = GetImageState(srcImage);
758 auto dst_buffer_state = GetBufferState(dstBuffer);
759
760 // Update bindings between buffer/image and cmd buffer
761 AddCommandBufferBindingImage(cb_node, src_image_state);
762 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
763}
764
Jeff Leger178b1e52020-10-05 12:22:23 -0400765void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
766 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
767 auto cb_node = GetCBState(commandBuffer);
768 auto src_image_state = GetImageState(pCopyImageToBufferInfo->srcImage);
769 auto dst_buffer_state = GetBufferState(pCopyImageToBufferInfo->dstBuffer);
770
771 // Update bindings between buffer/image and cmd buffer
772 AddCommandBufferBindingImage(cb_node, src_image_state);
773 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
774}
775
locke-lunargd556cc32019-09-17 01:21:23 -0600776void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
777 VkImageLayout dstImageLayout, uint32_t regionCount,
778 const VkBufferImageCopy *pRegions) {
779 auto cb_node = GetCBState(commandBuffer);
780 auto src_buffer_state = GetBufferState(srcBuffer);
781 auto dst_image_state = GetImageState(dstImage);
782
783 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
784 AddCommandBufferBindingImage(cb_node, dst_image_state);
785}
786
Jeff Leger178b1e52020-10-05 12:22:23 -0400787void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
788 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
789 auto cb_node = GetCBState(commandBuffer);
790 auto src_buffer_state = GetBufferState(pCopyBufferToImageInfo->srcBuffer);
791 auto dst_image_state = GetImageState(pCopyBufferToImageInfo->dstImage);
792
793 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
794 AddCommandBufferBindingImage(cb_node, dst_image_state);
795}
796
locke-lunargd556cc32019-09-17 01:21:23 -0600797// Get the image viewstate for a given framebuffer attachment
locke-lunargfc78e932020-11-19 17:06:24 -0700798IMAGE_VIEW_STATE *ValidationStateTracker::GetActiveAttachmentImageViewState(const CMD_BUFFER_STATE *cb, uint32_t index,
799 const CMD_BUFFER_STATE *primary_cb) {
800 if (primary_cb) {
801 assert(primary_cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < primary_cb->active_attachments->size()));
802 return primary_cb->active_attachments->at(index);
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300803 }
locke-lunargfc78e932020-11-19 17:06:24 -0700804 assert(cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < cb->active_attachments->size()));
805 return cb->active_attachments->at(index);
locke-lunargd556cc32019-09-17 01:21:23 -0600806}
807
808// Get the image viewstate for a given framebuffer attachment
locke-lunargfc78e932020-11-19 17:06:24 -0700809const IMAGE_VIEW_STATE *ValidationStateTracker::GetActiveAttachmentImageViewState(const CMD_BUFFER_STATE *cb, uint32_t index,
810 const CMD_BUFFER_STATE *primary_cb) const {
811 if (primary_cb) {
812 assert(primary_cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < primary_cb->active_attachments->size()));
813 return primary_cb->active_attachments->at(index);
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300814 }
locke-lunargfc78e932020-11-19 17:06:24 -0700815 assert(cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < cb->active_attachments->size()));
816 return cb->active_attachments->at(index);
locke-lunargd556cc32019-09-17 01:21:23 -0600817}
818
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700819void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state, layer_data::unordered_set<IMAGE_STATE *> *bound_images) {
John Zulaufd13b38e2021-03-05 08:17:38 -0700820 assert(bound_images);
821 for (auto *bound_image : *bound_images) {
822 if (bound_image && (bound_image != image_state) && bound_image->IsCompatibleAliasing(image_state)) {
823 auto inserted = bound_image->aliasing_images.emplace(image_state);
824 if (inserted.second) {
825 image_state->aliasing_images.emplace(bound_image);
locke-lunargd556cc32019-09-17 01:21:23 -0600826 }
827 }
828 }
829}
830
831void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
John Zulaufd13b38e2021-03-05 08:17:38 -0700832 for (auto *alias_state : image_state->aliasing_images) {
833 assert(alias_state);
834 alias_state->aliasing_images.erase(image_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600835 }
836 image_state->aliasing_images.clear();
837}
838
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700839void ValidationStateTracker::RemoveAliasingImages(const layer_data::unordered_set<IMAGE_STATE *> &bound_images) {
locke-lunargd556cc32019-09-17 01:21:23 -0600840 // This is one way clear. Because the bound_images include cross references, the one way clear loop could clear the whole
841 // reference. It doesn't need two ways clear.
John Zulaufd13b38e2021-03-05 08:17:38 -0700842 for (auto *bound_image : bound_images) {
843 if (bound_image) {
844 bound_image->aliasing_images.clear();
locke-lunargd556cc32019-09-17 01:21:23 -0600845 }
846 }
847}
848
locke-lunargd556cc32019-09-17 01:21:23 -0600849const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
850 auto it = queueMap.find(queue);
851 if (it == queueMap.cend()) {
852 return nullptr;
853 }
854 return &it->second;
855}
856
857QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
858 auto it = queueMap.find(queue);
859 if (it == queueMap.end()) {
860 return nullptr;
861 }
862 return &it->second;
863}
864
865const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
866 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
867 auto it = phys_dev_map->find(phys);
868 if (it == phys_dev_map->end()) {
869 return nullptr;
870 }
871 return &it->second;
872}
873
874PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
875 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
876 auto it = phys_dev_map->find(phys);
877 if (it == phys_dev_map->end()) {
878 return nullptr;
879 }
880 return &it->second;
881}
882
// Convenience overloads returning this tracker's own physical device state member.
PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
885
886// Return ptr to memory binding for given handle of specified type
887template <typename State, typename Result>
888static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
889 switch (typed_handle.type) {
890 case kVulkanObjectTypeImage:
891 return state->GetImageState(typed_handle.Cast<VkImage>());
892 case kVulkanObjectTypeBuffer:
893 return state->GetBufferState(typed_handle.Cast<VkBuffer>());
894 case kVulkanObjectTypeAccelerationStructureNV:
sourav parmarcd5fb182020-07-17 12:58:44 -0700895 return state->GetAccelerationStructureStateNV(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -0600896 default:
897 break;
898 }
899 return nullptr;
900}
901
902const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
903 return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
904}
905
906BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
907 return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
908}
909
// Build and register a DEVICE_MEMORY_STATE entry for a vkAllocateMemory result.
// Scans the allocate-info pNext chain for dedicated/export/import structures and
// records protected- and multi-instance-heap properties used by binding validation.
void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    // The fake address gives every allocation a unique numeric range for overlap checks
    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo, fake_address);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = LvlFindInChain<VkMemoryDedicatedAllocateInfo>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = LvlFindInChain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }

    auto alloc_flags = LvlFindInChain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
    if (alloc_flags) {
        auto dev_mask = alloc_flags->deviceMask;
        // More than one bit set in deviceMask => allocation is replicated per device instance
        if ((dev_mask != 0) && (dev_mask & (dev_mask - 1))) {
            mem_info->multi_instance = true;
        }
    }
    // A multi-instance heap also makes the allocation multi-instance on device groups
    auto heap_index = phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].heapIndex;
    mem_info->multi_instance |= (((phys_dev_mem_props.memoryHeaps[heap_index].flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT) != 0) &&
                                 physical_device_count > 1);

    // Assumes validation already for only a single import operation in the pNext
#ifdef VK_USE_PLATFORM_WIN32_KHR
    auto win32_import = LvlFindInChain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
    if (win32_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = win32_import->handleType;
    }
#endif
    auto fd_import = LvlFindInChain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
    if (fd_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = fd_import->handleType;
    }
    auto host_pointer_import = LvlFindInChain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
    if (host_pointer_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = host_pointer_import->handleType;
    }
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    // AHB Import doesn't have handle in the pNext struct
    // It should be assumed that all imported AHB can only have the same, single handleType
    auto ahb_import = LvlFindInChain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
    if ((ahb_import) && (ahb_import->buffer != nullptr)) {
        mem_info->is_import_ahb = true;
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
#endif  // VK_USE_PLATFORM_ANDROID_KHR

    // Protected memory types require special handling at bind/submit time
    const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    mem_info->unprotected = ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) == 0);
}
972
973// Create binding link between given sampler and command buffer node
974void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600975 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600976 return;
977 }
Jeff Bolzadbfa852019-10-04 13:53:30 -0500978 AddCommandBufferBinding(sampler_state->cb_bindings,
979 VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600980}
981
982// Create binding link between given image node and command buffer node
983void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600984 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600985 return;
986 }
987 // Skip validation if this image was created through WSI
988 if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
989 // First update cb binding for image
Jeff Bolzadbfa852019-10-04 13:53:30 -0500990 if (AddCommandBufferBinding(image_state->cb_bindings,
991 VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600992 // Now update CB binding in MemObj mini CB list
John Zulauf79f06582021-02-27 18:38:39 -0700993 for (auto *mem_binding : image_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700994 // Now update CBInfo's Mem reference list
995 AddCommandBufferBinding(mem_binding->cb_bindings,
996 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600997 }
998 }
999 }
1000}
1001
1002// Create binding link between given image view node and its image with command buffer node
1003void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001004 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001005 return;
1006 }
1007 // First add bindings for imageView
Jeff Bolzadbfa852019-10-04 13:53:30 -05001008 if (AddCommandBufferBinding(view_state->cb_bindings,
1009 VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001010 // Only need to continue if this is a new item
Jeff Bolzadbfa852019-10-04 13:53:30 -05001011 auto image_state = view_state->image_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06001012 // Add bindings for image within imageView
1013 if (image_state) {
1014 AddCommandBufferBindingImage(cb_node, image_state);
1015 }
1016 }
1017}
1018
1019// Create binding link between given buffer node and command buffer node
1020void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001021 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001022 return;
1023 }
1024 // First update cb binding for buffer
Jeff Bolzadbfa852019-10-04 13:53:30 -05001025 if (AddCommandBufferBinding(buffer_state->cb_bindings,
1026 VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001027 // Now update CB binding in MemObj mini CB list
John Zulauf79f06582021-02-27 18:38:39 -07001028 for (auto *mem_binding : buffer_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07001029 // Now update CBInfo's Mem reference list
1030 AddCommandBufferBinding(mem_binding->cb_bindings,
1031 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -06001032 }
1033 }
1034}
1035
1036// Create binding link between given buffer view node and its buffer with command buffer node
1037void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001038 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001039 return;
1040 }
1041 // First add bindings for bufferView
Jeff Bolzadbfa852019-10-04 13:53:30 -05001042 if (AddCommandBufferBinding(view_state->cb_bindings,
1043 VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
1044 auto buffer_state = view_state->buffer_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06001045 // Add bindings for buffer within bufferView
1046 if (buffer_state) {
1047 AddCommandBufferBindingBuffer(cb_node, buffer_state);
1048 }
1049 }
1050}
1051
1052// Create binding link between given acceleration structure and command buffer node
1053void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
1054 ACCELERATION_STRUCTURE_STATE *as_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001055 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001056 return;
1057 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001058 if (AddCommandBufferBinding(
1059 as_state->cb_bindings,
1060 VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001061 // Now update CB binding in MemObj mini CB list
John Zulauf79f06582021-02-27 18:38:39 -07001062 for (auto *mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07001063 // Now update CBInfo's Mem reference list
1064 AddCommandBufferBinding(mem_binding->cb_bindings,
1065 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -06001066 }
1067 }
1068}
1069
sourav parmarcd5fb182020-07-17 12:58:44 -07001070// Create binding link between given acceleration structure and command buffer node
1071void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
1072 ACCELERATION_STRUCTURE_STATE_KHR *as_state) {
1073 if (disabled[command_buffer_state]) {
1074 return;
1075 }
1076 if (AddCommandBufferBinding(
1077 as_state->cb_bindings,
1078 VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureKHR, as_state), cb_node)) {
1079 // Now update CB binding in MemObj mini CB list
John Zulauf79f06582021-02-27 18:38:39 -07001080 for (auto *mem_binding : as_state->GetBoundMemory()) {
sourav parmarcd5fb182020-07-17 12:58:44 -07001081 // Now update CBInfo's Mem reference list
1082 AddCommandBufferBinding(mem_binding->cb_bindings,
1083 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
1084 }
1085 }
1086}
1087
locke-lunargd556cc32019-09-17 01:21:23 -06001088// Clear a single object binding from given memory object
locke-lunarg5f59e782019-12-19 10:32:23 -07001089void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
locke-lunargd556cc32019-09-17 01:21:23 -06001090 // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
1091 if (mem_info) {
1092 mem_info->obj_bindings.erase(typed_handle);
1093 }
1094}
1095
1096// ClearMemoryObjectBindings clears the binding of objects to memory
1097// For the given object it pulls the memory bindings and makes sure that the bindings
1098// no longer refer to the object being cleared. This occurs when objects are destroyed.
1099void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
1100 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
1101 if (mem_binding) {
1102 if (!mem_binding->sparse) {
locke-lunarg5f59e782019-12-19 10:32:23 -07001103 ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06001104 } else { // Sparse, clear all bindings
1105 for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
locke-lunarg5f59e782019-12-19 10:32:23 -07001106 ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06001107 }
1108 }
1109 }
1110}
1111
// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
// Records the offset/size of the binding and adds a back-reference from the memory
// object's obj_bindings to the bound resource.
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        // Only record the binding if the memory handle maps to known state
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            // Bound size is taken from the resource's own memory requirements
            mem_binding->binding.size = mem_binding->requirements.size;
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}
1139
1140// For NULL mem case, clear any previous binding Else...
1141// Make sure given object is in its object map
1142// IF a previous binding existed, update binding
1143// Add reference from objectInfo to memoryInfo
1144// Add reference off of object's binding info
1145// Return VK_TRUE if addition is successful, VK_FALSE otherwise
locke-lunargcf04d582019-11-26 00:31:50 -07001146bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
1147 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
locke-lunargd556cc32019-09-17 01:21:23 -06001148 bool skip = VK_FALSE;
1149 // Handle NULL case separately, just clear previous binding & decrement reference
locke-lunargcf04d582019-11-26 00:31:50 -07001150 if (mem == VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06001151 // TODO : This should cause the range of the resource to be unbound according to spec
1152 } else {
1153 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
1154 assert(mem_binding);
1155 if (mem_binding) { // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
1156 assert(mem_binding->sparse);
locke-lunargcf04d582019-11-26 00:31:50 -07001157 MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
1158 if (binding.mem_state) {
1159 binding.mem_state->obj_bindings.insert(typed_handle);
locke-lunargd556cc32019-09-17 01:21:23 -06001160 // Need to set mem binding for this object
1161 mem_binding->sparse_bindings.insert(binding);
1162 mem_binding->UpdateBoundMemorySet();
1163 }
1164 }
1165 }
1166 return skip;
1167}
1168
locke-lunarg540b2252020-08-03 13:23:36 -06001169void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
1170 const char *function) {
locke-lunargb8d7a7a2020-10-25 16:01:52 -06001171 const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
1172 auto &state = cb_state->lastBound[lv_bind_point];
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001173 PIPELINE_STATE *pipe = state.pipeline_state;
locke-lunargd556cc32019-09-17 01:21:23 -06001174 if (VK_NULL_HANDLE != state.pipeline_layout) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001175 for (const auto &set_binding_pair : pipe->active_slots) {
1176 uint32_t set_index = set_binding_pair.first;
locke-lunargd556cc32019-09-17 01:21:23 -06001177 // Pull the set node
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001178 cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[set_index].bound_descriptor_set;
locke-lunargd556cc32019-09-17 01:21:23 -06001179
Tony-LunarG77822802020-05-28 16:35:46 -06001180 // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding
locke-lunargd556cc32019-09-17 01:21:23 -06001181
Tony-LunarG77822802020-05-28 16:35:46 -06001182 // TODO: If recreating the reduced_map here shows up in profilinging, need to find a way of sharing with the
1183 // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
1184 cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001185 const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pipe);
Tony-LunarG77822802020-05-28 16:35:46 -06001186
1187 if (reduced_map.IsManyDescriptors()) {
1188 // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001189 descriptor_set->UpdateValidationCache(*cb_state, *pipe, binding_req_map);
Tony-LunarG77822802020-05-28 16:35:46 -06001190 }
1191
1192 // We can skip updating the state if "nothing" has changed since the last validation.
1193 // See CoreChecks::ValidateCmdBufDrawState for more details.
1194 bool descriptor_set_changed =
1195 !reduced_map.IsManyDescriptors() ||
1196 // Update if descriptor set (or contents) has changed
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001197 state.per_set[set_index].validated_set != descriptor_set ||
1198 state.per_set[set_index].validated_set_change_count != descriptor_set->GetChangeCount() ||
Tony-LunarG77822802020-05-28 16:35:46 -06001199 (!disabled[image_layout_validation] &&
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001200 state.per_set[set_index].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
Tony-LunarG77822802020-05-28 16:35:46 -06001201 bool need_update = descriptor_set_changed ||
1202 // Update if previous bindingReqMap doesn't include new bindingReqMap
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001203 !std::includes(state.per_set[set_index].validated_set_binding_req_map.begin(),
1204 state.per_set[set_index].validated_set_binding_req_map.end(), binding_req_map.begin(),
Tony-LunarG77822802020-05-28 16:35:46 -06001205 binding_req_map.end());
1206
1207 if (need_update) {
1208 // Bind this set and its active descriptor resources to the command buffer
1209 if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
1210 // Only record the bindings that haven't already been recorded
1211 BindingReqMap delta_reqs;
1212 std::set_difference(binding_req_map.begin(), binding_req_map.end(),
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001213 state.per_set[set_index].validated_set_binding_req_map.begin(),
1214 state.per_set[set_index].validated_set_binding_req_map.end(),
Jeremy Gebbencbf22862021-03-03 12:01:22 -07001215 layer_data::insert_iterator<BindingReqMap>(delta_reqs, delta_reqs.begin()));
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001216 descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, delta_reqs, function);
Tony-LunarG77822802020-05-28 16:35:46 -06001217 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001218 descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, binding_req_map, function);
locke-lunargd556cc32019-09-17 01:21:23 -06001219 }
1220
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001221 state.per_set[set_index].validated_set = descriptor_set;
1222 state.per_set[set_index].validated_set_change_count = descriptor_set->GetChangeCount();
1223 state.per_set[set_index].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
Tony-LunarG77822802020-05-28 16:35:46 -06001224 if (reduced_map.IsManyDescriptors()) {
1225 // Check whether old == new before assigning, the equality check is much cheaper than
1226 // freeing and reallocating the map.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001227 if (state.per_set[set_index].validated_set_binding_req_map != set_binding_pair.second) {
1228 state.per_set[set_index].validated_set_binding_req_map = set_binding_pair.second;
Jeff Bolz56308942019-10-06 22:05:23 -05001229 }
Tony-LunarG77822802020-05-28 16:35:46 -06001230 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001231 state.per_set[set_index].validated_set_binding_req_map = BindingReqMap();
locke-lunargd556cc32019-09-17 01:21:23 -06001232 }
1233 }
1234 }
1235 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001236 if (!pipe->vertex_binding_descriptions_.empty()) {
locke-lunargd556cc32019-09-17 01:21:23 -06001237 cb_state->vertex_buffer_used = true;
1238 }
1239}
1240
1241// Remove set from setMap and delete the set
1242void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001243 descriptor_set->destroyed = true;
Jeff Bolzadbfa852019-10-04 13:53:30 -05001244 const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001245 // Any bound cmd buffers are now invalid
Jeff Bolzadbfa852019-10-04 13:53:30 -05001246 InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001247
locke-lunargd556cc32019-09-17 01:21:23 -06001248 setMap.erase(descriptor_set->GetSet());
1249}
1250
1251// Free all DS Pools including their Sets & related sub-structs
1252// NOTE : Calls to this function should be wrapped in mutex
1253void ValidationStateTracker::DeleteDescriptorSetPools() {
1254 for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
1255 // Remove this pools' sets from setMap and delete them
John Zulauf79f06582021-02-27 18:38:39 -07001256 for (auto *ds : ii->second->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06001257 FreeDescriptorSet(ds);
1258 }
1259 ii->second->sets.clear();
1260 ii = descriptorPoolMap.erase(ii);
1261 }
1262}
1263
1264// For given object struct return a ptr of BASE_NODE type for its wrapping struct
1265BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001266 if (object_struct.node) {
1267#ifdef _DEBUG
1268 // assert that lookup would find the same object
1269 VulkanTypedHandle other = object_struct;
1270 other.node = nullptr;
1271 assert(object_struct.node == GetStateStructPtrFromObject(other));
1272#endif
1273 return object_struct.node;
1274 }
locke-lunargd556cc32019-09-17 01:21:23 -06001275 BASE_NODE *base_ptr = nullptr;
1276 switch (object_struct.type) {
1277 case kVulkanObjectTypeDescriptorSet: {
1278 base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
1279 break;
1280 }
1281 case kVulkanObjectTypeSampler: {
1282 base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
1283 break;
1284 }
1285 case kVulkanObjectTypeQueryPool: {
1286 base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
1287 break;
1288 }
1289 case kVulkanObjectTypePipeline: {
1290 base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
1291 break;
1292 }
1293 case kVulkanObjectTypeBuffer: {
1294 base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
1295 break;
1296 }
1297 case kVulkanObjectTypeBufferView: {
1298 base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
1299 break;
1300 }
1301 case kVulkanObjectTypeImage: {
1302 base_ptr = GetImageState(object_struct.Cast<VkImage>());
1303 break;
1304 }
1305 case kVulkanObjectTypeImageView: {
1306 base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
1307 break;
1308 }
1309 case kVulkanObjectTypeEvent: {
1310 base_ptr = GetEventState(object_struct.Cast<VkEvent>());
1311 break;
1312 }
1313 case kVulkanObjectTypeDescriptorPool: {
1314 base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
1315 break;
1316 }
1317 case kVulkanObjectTypeCommandPool: {
1318 base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
1319 break;
1320 }
1321 case kVulkanObjectTypeFramebuffer: {
1322 base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
1323 break;
1324 }
1325 case kVulkanObjectTypeRenderPass: {
1326 base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
1327 break;
1328 }
1329 case kVulkanObjectTypeDeviceMemory: {
1330 base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
1331 break;
1332 }
1333 case kVulkanObjectTypeAccelerationStructureNV: {
sourav parmarcd5fb182020-07-17 12:58:44 -07001334 base_ptr = GetAccelerationStructureStateNV(object_struct.Cast<VkAccelerationStructureNV>());
1335 break;
1336 }
1337 case kVulkanObjectTypeAccelerationStructureKHR: {
1338 base_ptr = GetAccelerationStructureStateKHR(object_struct.Cast<VkAccelerationStructureKHR>());
locke-lunargd556cc32019-09-17 01:21:23 -06001339 break;
1340 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001341 case kVulkanObjectTypeUnknown:
1342 // This can happen if an element of the object_bindings vector has been
1343 // zeroed out, after an object is destroyed.
1344 break;
locke-lunargd556cc32019-09-17 01:21:23 -06001345 default:
1346 // TODO : Any other objects to be handled here?
1347 assert(0);
1348 break;
1349 }
1350 return base_ptr;
1351}
1352
sfricke-samsungbf1a2ed2020-06-14 23:31:00 -07001353// Gets union of all features defined by Potential Format Features
1354// except, does not handle the external format case for AHB as that only can be used for sampled images
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001355VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
1356 VkFormatFeatureFlags format_features = 0;
1357
1358 if (format != VK_FORMAT_UNDEFINED) {
1359 VkFormatProperties format_properties;
1360 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
1361 format_features |= format_properties.linearTilingFeatures;
1362 format_features |= format_properties.optimalTilingFeatures;
1363 if (device_extensions.vk_ext_image_drm_format_modifier) {
1364 // VK_KHR_get_physical_device_properties2 is required in this case
1365 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
1366 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
1367 nullptr};
1368 format_properties_2.pNext = (void *)&drm_properties_list;
Marc Alcala Prieto773871c2021-02-04 19:24:43 +01001369
1370 // First call is to get the number of modifiers compatible with the queried format
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001371 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
Marc Alcala Prieto773871c2021-02-04 19:24:43 +01001372
1373 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
1374 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
1375 drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
1376
1377 // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
1378 // compatible with the queried format
1379 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
1380
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001381 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
1382 format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
1383 }
1384 }
1385 }
1386
1387 return format_features;
1388}
1389
locke-lunargd556cc32019-09-17 01:21:23 -06001390// Tie the VulkanTypedHandle to the cmd buffer which includes:
1391// Add object_binding to cmd buffer
1392// Add cb_binding to object
Jeremy Gebbencbf22862021-03-03 12:01:22 -07001393bool ValidationStateTracker::AddCommandBufferBinding(BASE_NODE::BindingsType &cb_bindings,
locke-lunargd556cc32019-09-17 01:21:23 -06001394 const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001395 if (disabled[command_buffer_state]) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001396 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001397 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001398 // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
1399 // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
Jeremy Gebbenfc6f8152021-03-18 16:58:55 -06001400 auto inserted = cb_bindings.emplace(cb_node, -1);
Jeff Bolzadbfa852019-10-04 13:53:30 -05001401 if (inserted.second) {
1402 cb_node->object_bindings.push_back(obj);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001403 inserted.first->second = static_cast<int>(cb_node->object_bindings.size()) - 1;
Jeff Bolzadbfa852019-10-04 13:53:30 -05001404 return true;
1405 }
1406 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001407}
1408
1409// For a given object, if cb_node is in that objects cb_bindings, remove cb_node
1410void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
1411 BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
1412 if (base_obj) base_obj->cb_bindings.erase(cb_node);
1413}
1414
// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
// NOTE: the order below matters — linked command buffers must be invalidated before the
// link sets are cleared, and object bindings must be walked before object_bindings.clear().
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *cb_state = GetCBState(cb);
    if (cb_state) {
        cb_state->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        cb_state->commandBuffer = cb;
        memset(&cb_state->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&cb_state->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        cb_state->hasDrawCmd = false;
        cb_state->hasTraceRaysCmd = false;
        cb_state->hasBuildAccelerationStructureCmd = false;
        cb_state->hasDispatchCmd = false;
        cb_state->state = CB_NEW;
        cb_state->commandCount = 0;
        cb_state->submitCount = 0;
        cb_state->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        cb_state->status = 0;
        cb_state->static_status = 0;
        cb_state->viewportMask = 0;
        cb_state->viewportWithCountMask = 0;
        cb_state->viewportWithCountCount = 0;
        cb_state->scissorMask = 0;
        cb_state->scissorWithCountMask = 0;
        cb_state->primitiveTopology = VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;

        // Clear pipeline/descriptor binding state for every bind point.
        for (auto &item : cb_state->lastBound) {
            item.reset();
        }

        // Render-pass and attachment state.
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        cb_state->activeRenderPass = nullptr;
        cb_state->active_attachments = nullptr;
        cb_state->active_subpasses = nullptr;
        cb_state->attachments_view_states.clear();
        cb_state->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        cb_state->activeSubpass = 0;
        cb_state->broken_bindings.clear();
        cb_state->waitedEvents.clear();
        cb_state->events.clear();
        cb_state->writeEventsBeforeWait.clear();
        cb_state->activeQueries.clear();
        cb_state->startedQueries.clear();
        cb_state->image_layout_map.clear();
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        cb_state->vertex_buffer_used = false;
        cb_state->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_state->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto *sub_cb : cb_state->linkedCommandBuffers) {
            sub_cb->linkedCommandBuffers.erase(cb_state);
        }
        cb_state->linkedCommandBuffers.clear();
        cb_state->queue_submit_functions.clear();
        cb_state->cmd_execute_commands_functions.clear();
        cb_state->eventUpdates.clear();
        cb_state->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : cb_state->object_bindings) {
            RemoveCommandBufferBinding(obj, cb_state);
        }
        cb_state->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto &framebuffer : cb_state->framebuffers) {
            framebuffer->cb_bindings.erase(cb_state);
        }
        cb_state->framebuffers.clear();
        cb_state->activeFramebuffer = VK_NULL_HANDLE;
        cb_state->index_buffer_binding.reset();

        // Queue-family-ownership-transfer barrier tracking.
        cb_state->qfo_transfer_image_barriers.Reset();
        cb_state->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
        cb_state->debug_label.Reset();
        cb_state->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        cb_state->small_indexed_draw_call_count = 0;

        cb_state->transform_feedback_active = false;
    }
    // Let other layers (e.g. GPU-AV) react to the reset even if we had no state for this CB.
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}
1508
1509void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
1510 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
1511 VkResult result) {
1512 if (VK_SUCCESS != result) return;
1513
1514 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
1515 if (nullptr == enabled_features_found) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001516 const auto *features2 = LvlFindInChain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001517 if (features2) {
1518 enabled_features_found = &(features2->features);
1519 }
1520 }
1521
1522 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
1523 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
1524 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
1525
1526 if (nullptr == enabled_features_found) {
1527 state_tracker->enabled_features.core = {};
1528 } else {
1529 state_tracker->enabled_features.core = *enabled_features_found;
1530 }
1531
1532 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
1533 // previously set them through an explicit API call.
1534 uint32_t count;
1535 auto pd_state = GetPhysicalDeviceState(gpu);
1536 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
1537 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
1538 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
1539 // Save local link to this device's physical device state
1540 state_tracker->physical_device_state = pd_state;
1541
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001542 const auto *vulkan_12_features = LvlFindInChain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001543 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001544 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001545 } else {
sfricke-samsung27c70722020-05-02 08:42:39 -07001546 // Set Extension Feature Aliases to false as there is no struct to check
1547 state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
1548 state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
1549 state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
1550 state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
1551 state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
1552 state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001553 state_tracker->enabled_features.core12.subgroupBroadcastDynamicId = VK_FALSE;
sfricke-samsung27c70722020-05-02 08:42:39 -07001554
1555 // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001556
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001557 const auto *eight_bit_storage_features = LvlFindInChain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001558 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001559 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1560 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1561 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1562 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001563 }
1564
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001565 const auto *float16_int8_features = LvlFindInChain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001566 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001567 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1568 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001569 }
1570
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001571 const auto *descriptor_indexing_features = LvlFindInChain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001572 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001573 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1574 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1575 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1576 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1577 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1578 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1579 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1580 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1581 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1582 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1583 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1584 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1585 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1586 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1587 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1588 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1589 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1590 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1591 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1592 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1593 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1594 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1595 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1596 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1597 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1598 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1599 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1600 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
1601 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
1602 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1603 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1604 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1605 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1606 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1607 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1608 descriptor_indexing_features->descriptorBindingPartiallyBound;
1609 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1610 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1611 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001612 }
1613
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001614 const auto *scalar_block_layout_features = LvlFindInChain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001615 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001616 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001617 }
1618
1619 const auto *imageless_framebuffer_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001620 LvlFindInChain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001621 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001622 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001623 }
1624
1625 const auto *uniform_buffer_standard_layout_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001626 LvlFindInChain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001627 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001628 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1629 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001630 }
1631
1632 const auto *subgroup_extended_types_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001633 LvlFindInChain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001634 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001635 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1636 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001637 }
1638
1639 const auto *separate_depth_stencil_layouts_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001640 LvlFindInChain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001641 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001642 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1643 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001644 }
1645
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001646 const auto *host_query_reset_features = LvlFindInChain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001647 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001648 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001649 }
1650
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001651 const auto *timeline_semaphore_features = LvlFindInChain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001652 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001653 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001654 }
1655
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001656 const auto *buffer_device_address = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001657 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001658 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1659 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1660 buffer_device_address->bufferDeviceAddressCaptureReplay;
1661 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1662 buffer_device_address->bufferDeviceAddressMultiDevice;
1663 }
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001664
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001665 const auto *atomic_int64_features = LvlFindInChain<VkPhysicalDeviceShaderAtomicInt64Features>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001666 if (atomic_int64_features) {
1667 state_tracker->enabled_features.core12.shaderBufferInt64Atomics = atomic_int64_features->shaderBufferInt64Atomics;
1668 state_tracker->enabled_features.core12.shaderSharedInt64Atomics = atomic_int64_features->shaderSharedInt64Atomics;
1669 }
1670
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001671 const auto *memory_model_features = LvlFindInChain<VkPhysicalDeviceVulkanMemoryModelFeatures>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001672 if (memory_model_features) {
1673 state_tracker->enabled_features.core12.vulkanMemoryModel = memory_model_features->vulkanMemoryModel;
1674 state_tracker->enabled_features.core12.vulkanMemoryModelDeviceScope =
1675 memory_model_features->vulkanMemoryModelDeviceScope;
1676 state_tracker->enabled_features.core12.vulkanMemoryModelAvailabilityVisibilityChains =
1677 memory_model_features->vulkanMemoryModelAvailabilityVisibilityChains;
1678 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001679 }
1680
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001681 const auto *vulkan_11_features = LvlFindInChain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001682 if (vulkan_11_features) {
1683 state_tracker->enabled_features.core11 = *vulkan_11_features;
1684 } else {
1685 // These structs are only allowed in pNext chain if there is no kPhysicalDeviceVulkan11Features
1686
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001687 const auto *sixteen_bit_storage_features = LvlFindInChain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001688 if (sixteen_bit_storage_features) {
1689 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1690 sixteen_bit_storage_features->storageBuffer16BitAccess;
1691 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1692 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1693 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1694 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1695 }
1696
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001697 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001698 if (multiview_features) {
1699 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1700 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1701 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1702 }
1703
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001704 const auto *variable_pointers_features = LvlFindInChain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001705 if (variable_pointers_features) {
1706 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1707 variable_pointers_features->variablePointersStorageBuffer;
1708 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1709 }
1710
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001711 const auto *protected_memory_features = LvlFindInChain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001712 if (protected_memory_features) {
1713 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1714 }
1715
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001716 const auto *ycbcr_conversion_features = LvlFindInChain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001717 if (ycbcr_conversion_features) {
1718 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1719 }
1720
1721 const auto *shader_draw_parameters_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001722 LvlFindInChain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001723 if (shader_draw_parameters_features) {
1724 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001725 }
1726 }
1727
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001728 const auto *device_group_ci = LvlFindInChain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001729 if (device_group_ci) {
1730 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1731 state_tracker->device_group_create_info = *device_group_ci;
1732 } else {
1733 state_tracker->physical_device_count = 1;
1734 }
locke-lunargd556cc32019-09-17 01:21:23 -06001735
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001736 const auto *exclusive_scissor_features = LvlFindInChain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001737 if (exclusive_scissor_features) {
1738 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1739 }
1740
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001741 const auto *shading_rate_image_features = LvlFindInChain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001742 if (shading_rate_image_features) {
1743 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1744 }
1745
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001746 const auto *mesh_shader_features = LvlFindInChain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001747 if (mesh_shader_features) {
1748 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1749 }
1750
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001751 const auto *inline_uniform_block_features = LvlFindInChain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001752 if (inline_uniform_block_features) {
1753 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1754 }
1755
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001756 const auto *transform_feedback_features = LvlFindInChain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001757 if (transform_feedback_features) {
1758 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1759 }
1760
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001761 const auto *vtx_attrib_div_features = LvlFindInChain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001762 if (vtx_attrib_div_features) {
1763 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1764 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001765
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001766 const auto *buffer_device_address_ext = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001767 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001768 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001769 }
1770
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001771 const auto *cooperative_matrix_features = LvlFindInChain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001772 if (cooperative_matrix_features) {
1773 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1774 }
1775
locke-lunargd556cc32019-09-17 01:21:23 -06001776 const auto *compute_shader_derivatives_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001777 LvlFindInChain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001778 if (compute_shader_derivatives_features) {
1779 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1780 }
1781
1782 const auto *fragment_shader_barycentric_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001783 LvlFindInChain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001784 if (fragment_shader_barycentric_features) {
1785 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1786 }
1787
1788 const auto *shader_image_footprint_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001789 LvlFindInChain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001790 if (shader_image_footprint_features) {
1791 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1792 }
1793
1794 const auto *fragment_shader_interlock_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001795 LvlFindInChain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001796 if (fragment_shader_interlock_features) {
1797 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1798 }
1799
1800 const auto *demote_to_helper_invocation_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001801 LvlFindInChain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001802 if (demote_to_helper_invocation_features) {
1803 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1804 }
1805
1806 const auto *texel_buffer_alignment_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001807 LvlFindInChain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001808 if (texel_buffer_alignment_features) {
1809 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1810 }
1811
locke-lunargd556cc32019-09-17 01:21:23 -06001812 const auto *pipeline_exe_props_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001813 LvlFindInChain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001814 if (pipeline_exe_props_features) {
1815 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1816 }
1817
Jeff Bolz82f854d2019-09-17 14:56:47 -05001818 const auto *dedicated_allocation_image_aliasing_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001819 LvlFindInChain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
Jeff Bolz82f854d2019-09-17 14:56:47 -05001820 if (dedicated_allocation_image_aliasing_features) {
1821 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1822 *dedicated_allocation_image_aliasing_features;
1823 }
1824
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001825 const auto *performance_query_features = LvlFindInChain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001826 if (performance_query_features) {
1827 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1828 }
1829
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001830 const auto *device_coherent_memory_features = LvlFindInChain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
Tobias Hector782bcde2019-11-28 16:19:42 +00001831 if (device_coherent_memory_features) {
1832 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1833 }
1834
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001835 const auto *ycbcr_image_array_features = LvlFindInChain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungcead0802020-01-30 22:20:10 -08001836 if (ycbcr_image_array_features) {
1837 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1838 }
1839
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001840 const auto *ray_query_features = LvlFindInChain<VkPhysicalDeviceRayQueryFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001841 if (ray_query_features) {
1842 state_tracker->enabled_features.ray_query_features = *ray_query_features;
1843 }
1844
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001845 const auto *ray_tracing_pipeline_features = LvlFindInChain<VkPhysicalDeviceRayTracingPipelineFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001846 if (ray_tracing_pipeline_features) {
1847 state_tracker->enabled_features.ray_tracing_pipeline_features = *ray_tracing_pipeline_features;
1848 }
1849
1850 const auto *ray_tracing_acceleration_structure_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001851 LvlFindInChain<VkPhysicalDeviceAccelerationStructureFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001852 if (ray_tracing_acceleration_structure_features) {
1853 state_tracker->enabled_features.ray_tracing_acceleration_structure_features = *ray_tracing_acceleration_structure_features;
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001854 }
1855
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001856 const auto *robustness2_features = LvlFindInChain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz165818a2020-05-08 11:19:03 -05001857 if (robustness2_features) {
1858 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1859 }
1860
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001861 const auto *fragment_density_map_features = LvlFindInChain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001862 if (fragment_density_map_features) {
1863 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1864 }
1865
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001866 const auto *fragment_density_map_features2 = LvlFindInChain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001867 if (fragment_density_map_features2) {
1868 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1869 }
1870
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001871 const auto *astc_decode_features = LvlFindInChain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001872 if (astc_decode_features) {
1873 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1874 }
1875
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001876 const auto *custom_border_color_features = LvlFindInChain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
Tony-LunarG7337b312020-04-15 16:40:25 -06001877 if (custom_border_color_features) {
1878 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1879 }
1880
sfricke-samsungfd661d62020-05-16 00:57:27 -07001881 const auto *pipeline_creation_cache_control_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001882 LvlFindInChain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungfd661d62020-05-16 00:57:27 -07001883 if (pipeline_creation_cache_control_features) {
1884 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1885 }
1886
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001887 const auto *fragment_shading_rate_features = LvlFindInChain<VkPhysicalDeviceFragmentShadingRateFeaturesKHR>(pCreateInfo->pNext);
Tobias Hector6663c9b2020-11-05 10:18:02 +00001888 if (fragment_shading_rate_features) {
1889 state_tracker->enabled_features.fragment_shading_rate_features = *fragment_shading_rate_features;
1890 }
1891
Piers Daniell39842ee2020-07-10 16:42:33 -06001892 const auto *extended_dynamic_state_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001893 LvlFindInChain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
Piers Daniell39842ee2020-07-10 16:42:33 -06001894 if (extended_dynamic_state_features) {
1895 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1896 }
1897
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001898 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001899 if (multiview_features) {
1900 state_tracker->enabled_features.multiview_features = *multiview_features;
1901 }
1902
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001903 const auto *portability_features = LvlFindInChain<VkPhysicalDevicePortabilitySubsetFeaturesKHR>(pCreateInfo->pNext);
Nathaniel Cesariob3f2d702020-11-09 09:20:49 -07001904 if (portability_features) {
1905 state_tracker->enabled_features.portability_subset_features = *portability_features;
1906 }
1907
sfricke-samsung0065ce02020-12-03 22:46:37 -08001908 const auto *shader_integer_functions2_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001909 LvlFindInChain<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001910 if (shader_integer_functions2_features) {
1911 state_tracker->enabled_features.shader_integer_functions2_features = *shader_integer_functions2_features;
1912 }
1913
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001914 const auto *shader_sm_builtins_feature = LvlFindInChain<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001915 if (shader_sm_builtins_feature) {
1916 state_tracker->enabled_features.shader_sm_builtins_feature = *shader_sm_builtins_feature;
1917 }
1918
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001919 const auto *shader_atomic_float_feature = LvlFindInChain<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001920 if (shader_atomic_float_feature) {
1921 state_tracker->enabled_features.shader_atomic_float_feature = *shader_atomic_float_feature;
1922 }
1923
1924 const auto *shader_image_atomic_int64_feature =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001925 LvlFindInChain<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001926 if (shader_image_atomic_int64_feature) {
1927 state_tracker->enabled_features.shader_image_atomic_int64_feature = *shader_image_atomic_int64_feature;
1928 }
1929
sfricke-samsung486a51e2021-01-02 00:10:15 -08001930 const auto *shader_clock_feature = LvlFindInChain<VkPhysicalDeviceShaderClockFeaturesKHR>(pCreateInfo->pNext);
1931 if (shader_clock_feature) {
1932 state_tracker->enabled_features.shader_clock_feature = *shader_clock_feature;
1933 }
1934
Jeremy Gebben5f585ae2021-02-02 09:03:06 -07001935 const auto *conditional_rendering_features =
1936 LvlFindInChain<VkPhysicalDeviceConditionalRenderingFeaturesEXT>(pCreateInfo->pNext);
1937 if (conditional_rendering_features) {
1938 state_tracker->enabled_features.conditional_rendering = *conditional_rendering_features;
1939 }
1940
Shannon McPhersondb287d42021-02-02 15:27:32 -07001941 const auto *workgroup_memory_explicit_layout_features =
1942 LvlFindInChain<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>(pCreateInfo->pNext);
1943 if (workgroup_memory_explicit_layout_features) {
1944 state_tracker->enabled_features.workgroup_memory_explicit_layout_features = *workgroup_memory_explicit_layout_features;
1945 }
1946
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001947 const auto *synchronization2_features =
1948 LvlFindInChain<VkPhysicalDeviceSynchronization2FeaturesKHR>(pCreateInfo->pNext);
1949 if (synchronization2_features) {
1950 state_tracker->enabled_features.synchronization2_features = *synchronization2_features;
1951 }
1952
locke-lunargd556cc32019-09-17 01:21:23 -06001953 // Store physical device properties and physical device mem limits into CoreChecks structs
1954 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1955 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001956 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1957 &state_tracker->phys_dev_props_core11);
1958 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1959 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001960
1961 const auto &dev_ext = state_tracker->device_extensions;
1962 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1963
1964 if (dev_ext.vk_khr_push_descriptor) {
1965 // Get the needed push_descriptor limits
1966 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1967 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1968 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1969 }
1970
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001971 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001972 VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing_prop;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001973 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1974 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1975 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1976 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1977 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1978 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1979 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1980 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1981 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1982 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1983 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1984 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1985 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1986 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1987 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1988 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1989 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1990 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1991 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1992 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1993 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1994 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1995 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1996 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1997 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1998 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1999 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
2000 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
2001 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
2002 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
2003 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
2004 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
2005 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
2006 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
2007 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
2008 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
2009 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
2010 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
2011 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
2012 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
2013 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
2014 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
2015 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
2016 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
2017 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
2018 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
2019 }
2020
locke-lunargd556cc32019-09-17 01:21:23 -06002021 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
2022 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
2023 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
2024 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002025
2026 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002027 VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002028 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
2029 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
2030 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
2031 depth_stencil_resolve_props.supportedStencilResolveModes;
2032 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
2033 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
2034 }
2035
locke-lunargd556cc32019-09-17 01:21:23 -06002036 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002037 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
sourav parmarcd5fb182020-07-17 12:58:44 -07002038 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing_pipeline, &phys_dev_props->ray_tracing_propsKHR);
2039 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_acceleration_structure, &phys_dev_props->acc_structure_props);
locke-lunargd556cc32019-09-17 01:21:23 -06002040 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
2041 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02002042 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002043 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07002044 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06002045 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
locke-lunarg3fa463a2020-10-23 16:39:04 -06002046 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_multiview, &phys_dev_props->multiview_props);
Nathaniel Cesario3291c912020-11-17 16:54:41 -07002047 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_portability_subset, &phys_dev_props->portability_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002048
2049 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002050 VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002051 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
2052 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
2053 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
2054 }
2055
2056 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002057 VkPhysicalDeviceFloatControlsProperties float_controls_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002058 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
2059 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
2060 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
2061 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
2062 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
2063 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
2064 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
2065 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
2066 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
2067 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
2068 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
2069 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
2070 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
2071 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
2072 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
2073 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
2074 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
2075 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
2076 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
2077 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
2078 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
2079 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07002080
locke-lunargd556cc32019-09-17 01:21:23 -06002081 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
2082 // Get the needed cooperative_matrix properties
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07002083 auto cooperative_matrix_props = LvlInitStruct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
2084 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&cooperative_matrix_props);
locke-lunargd556cc32019-09-17 01:21:23 -06002085 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
2086 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
2087
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002088 uint32_t num_cooperative_matrix_properties = 0;
2089 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties, NULL);
2090 state_tracker->cooperative_matrix_properties.resize(num_cooperative_matrix_properties,
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07002091 LvlInitStruct<VkCooperativeMatrixPropertiesNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002092
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002093 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties,
locke-lunargd556cc32019-09-17 01:21:23 -06002094 state_tracker->cooperative_matrix_properties.data());
2095 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002096 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06002097 // Get the needed subgroup limits
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07002098 auto subgroup_prop = LvlInitStruct<VkPhysicalDeviceSubgroupProperties>();
2099 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&subgroup_prop);
locke-lunargd556cc32019-09-17 01:21:23 -06002100 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
2101
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002102 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
2103 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
2104 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
2105 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06002106 }
2107
Tobias Hector6663c9b2020-11-05 10:18:02 +00002108 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_fragment_shading_rate, &phys_dev_props->fragment_shading_rate_props);
2109
locke-lunargd556cc32019-09-17 01:21:23 -06002110 // Store queue family data
2111 if (pCreateInfo->pQueueCreateInfos != nullptr) {
2112 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07002113 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
Jeremy Gebbencbf22862021-03-03 12:01:22 -07002114 state_tracker->queue_family_index_map.emplace(queue_create_info.queueFamilyIndex, queue_create_info.queueCount);
2115 state_tracker->queue_family_create_flags_map.emplace( queue_create_info.queueFamilyIndex, queue_create_info.flags);
locke-lunargd556cc32019-09-17 01:21:23 -06002116 }
2117 }
2118}
2119
2120void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
2121 if (!device) return;
2122
locke-lunargd556cc32019-09-17 01:21:23 -06002123 // Reset all command buffers before destroying them, to unlink object_bindings.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002124 for (auto &command_buffer : commandBufferMap) {
2125 ResetCommandBufferState(command_buffer.first);
locke-lunargd556cc32019-09-17 01:21:23 -06002126 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05002127 pipelineMap.clear();
2128 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06002129 commandBufferMap.clear();
2130
2131 // This will also delete all sets in the pool & remove them from setMap
2132 DeleteDescriptorSetPools();
2133 // All sets should be removed
2134 assert(setMap.empty());
2135 descriptorSetLayoutMap.clear();
2136 imageViewMap.clear();
2137 imageMap.clear();
2138 bufferViewMap.clear();
2139 bufferMap.clear();
2140 // Queues persist until device is destroyed
2141 queueMap.clear();
2142}
2143
2144// Loop through bound objects and increment their in_use counts.
2145void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
John Zulauf79f06582021-02-27 18:38:39 -07002146 for (const auto &obj : cb_node->object_bindings) {
locke-lunargd556cc32019-09-17 01:21:23 -06002147 auto base_obj = GetStateStructPtrFromObject(obj);
2148 if (base_obj) {
2149 base_obj->in_use.fetch_add(1);
2150 }
2151 }
2152}
2153
2154// Track which resources are in-flight by atomically incrementing their "in_use" count
2155void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
2156 cb_node->submitCount++;
2157 cb_node->in_use.fetch_add(1);
2158
2159 // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
2160 IncrementBoundObjects(cb_node);
2161 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
2162 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
2163 // should then be flagged prior to calling this function
2164 for (auto event : cb_node->writeEventsBeforeWait) {
2165 auto event_state = GetEventState(event);
2166 if (event_state) event_state->write_in_use++;
2167 }
2168}
2169
2170// Decrement in-use count for objects bound to command buffer
2171void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
2172 BASE_NODE *base_obj = nullptr;
John Zulauf79f06582021-02-27 18:38:39 -07002173 for (const auto &obj : cb_node->object_bindings) {
locke-lunargd556cc32019-09-17 01:21:23 -06002174 base_obj = GetStateStructPtrFromObject(obj);
2175 if (base_obj) {
2176 base_obj->in_use.fetch_sub(1);
2177 }
2178 }
2179}
2180
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002181void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
Jeremy Gebbencbf22862021-03-03 12:01:22 -07002182 layer_data::unordered_map<VkQueue, uint64_t> other_queue_seqs;
2183 layer_data::unordered_map<VkSemaphore, uint64_t> timeline_semaphore_counters;
locke-lunargd556cc32019-09-17 01:21:23 -06002184
2185 // Roll this queue forward, one submission at a time.
2186 while (pQueue->seq < seq) {
2187 auto &submission = pQueue->submissions.front();
2188
2189 for (auto &wait : submission.waitSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002190 auto semaphore_state = GetSemaphoreState(wait.semaphore);
2191 if (semaphore_state) {
2192 semaphore_state->in_use.fetch_sub(1);
locke-lunargd556cc32019-09-17 01:21:23 -06002193 }
Mike Schuchardt2df08912020-12-15 16:28:09 -08002194 if (wait.type == VK_SEMAPHORE_TYPE_TIMELINE) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002195 auto &last_counter = timeline_semaphore_counters[wait.semaphore];
2196 last_counter = std::max(last_counter, wait.payload);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08002197 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002198 auto &last_seq = other_queue_seqs[wait.queue];
2199 last_seq = std::max(last_seq, wait.seq);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08002200 }
locke-lunargd556cc32019-09-17 01:21:23 -06002201 }
2202
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01002203 for (auto &signal : submission.signalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002204 auto semaphore_state = GetSemaphoreState(signal.semaphore);
2205 if (semaphore_state) {
2206 semaphore_state->in_use.fetch_sub(1);
Mike Schuchardt2df08912020-12-15 16:28:09 -08002207 if (semaphore_state->type == VK_SEMAPHORE_TYPE_TIMELINE && semaphore_state->payload < signal.payload) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002208 semaphore_state->payload = signal.payload;
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01002209 }
locke-lunargd556cc32019-09-17 01:21:23 -06002210 }
2211 }
2212
2213 for (auto &semaphore : submission.externalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002214 auto semaphore_state = GetSemaphoreState(semaphore);
2215 if (semaphore_state) {
2216 semaphore_state->in_use.fetch_sub(1);
locke-lunargd556cc32019-09-17 01:21:23 -06002217 }
2218 }
2219
2220 for (auto cb : submission.cbs) {
2221 auto cb_node = GetCBState(cb);
2222 if (!cb_node) {
2223 continue;
2224 }
2225 // First perform decrement on general case bound objects
2226 DecrementBoundResources(cb_node);
2227 for (auto event : cb_node->writeEventsBeforeWait) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002228 auto event_node = eventMap.find(event);
2229 if (event_node != eventMap.end()) {
John Zulauf48057322020-12-02 11:59:31 -07002230 event_node->second->write_in_use--;
locke-lunargd556cc32019-09-17 01:21:23 -06002231 }
2232 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002233 QueryMap local_query_to_state_map;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02002234 VkQueryPool first_pool = VK_NULL_HANDLE;
Jeff Bolz310775c2019-10-09 00:46:33 -05002235 for (auto &function : cb_node->queryUpdates) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002236 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
Jeff Bolz310775c2019-10-09 00:46:33 -05002237 }
2238
John Zulauf79f06582021-02-27 18:38:39 -07002239 for (const auto &query_state_pair : local_query_to_state_map) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002240 if (query_state_pair.second == QUERYSTATE_ENDED) {
2241 queryToStateMap[query_state_pair.first] = QUERYSTATE_AVAILABLE;
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002242 }
locke-lunargd556cc32019-09-17 01:21:23 -06002243 }
locke-lunargd556cc32019-09-17 01:21:23 -06002244 cb_node->in_use.fetch_sub(1);
2245 }
2246
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002247 auto fence_state = GetFenceState(submission.fence);
2248 if (fence_state && fence_state->scope == kSyncScopeInternal) {
2249 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06002250 }
2251
2252 pQueue->submissions.pop_front();
2253 pQueue->seq++;
2254 }
2255
2256 // Roll other queues forward to the highest seq we saw a wait for
John Zulauf79f06582021-02-27 18:38:39 -07002257 for (const auto &qs : other_queue_seqs) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002258 RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002259 }
John Zulauf79f06582021-02-27 18:38:39 -07002260 for (const auto &sc : timeline_semaphore_counters) {
Marshall Drew-Brook03847582020-11-06 15:10:45 -08002261 RetireTimelineSemaphore(sc.first, sc.second);
2262 }
locke-lunargd556cc32019-09-17 01:21:23 -06002263}
2264
2265// Submit a fence to a queue, delimiting previous fences and previous untracked
2266// work by it.
2267static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
2268 pFence->state = FENCE_INFLIGHT;
2269 pFence->signaler.first = pQueue->queue;
2270 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
2271}
2272
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002273uint64_t ValidationStateTracker::RecordSubmitFence(QUEUE_STATE *queue_state, VkFence fence, uint32_t submit_count) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002274 auto fence_state = GetFenceState(fence);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002275 uint64_t early_retire_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002276 if (fence_state) {
2277 if (fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06002278 // Mark fence in use
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002279 SubmitFence(queue_state, fence_state, std::max(1u, submit_count));
2280 if (!submit_count) {
locke-lunargd556cc32019-09-17 01:21:23 -06002281 // If no submissions, but just dropping a fence on the end of the queue,
2282 // record an empty submission with just the fence, so we can determine
2283 // its completion.
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002284 CB_SUBMISSION submission;
2285 submission.fence = fence;
2286 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06002287 }
2288 } else {
2289 // Retire work up until this fence early, we will not see the wait that corresponds to this signal
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002290 early_retire_seq = queue_state->seq + queue_state->submissions.size();
locke-lunargd556cc32019-09-17 01:21:23 -06002291 }
2292 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002293 return early_retire_seq;
2294}
2295
2296void ValidationStateTracker::RecordSubmitCommandBuffer(CB_SUBMISSION &submission, VkCommandBuffer command_buffer) {
2297 auto cb_node = GetCBState(command_buffer);
2298 if (cb_node) {
2299 submission.cbs.push_back(command_buffer);
John Zulauf79f06582021-02-27 18:38:39 -07002300 for (auto *secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002301 submission.cbs.push_back(secondary_cmd_buffer->commandBuffer);
2302 IncrementResources(secondary_cmd_buffer);
2303 }
2304 IncrementResources(cb_node);
2305
2306 VkQueryPool first_pool = VK_NULL_HANDLE;
2307 EventToStageMap local_event_to_stage_map;
2308 QueryMap local_query_to_state_map;
2309 for (auto &function : cb_node->queryUpdates) {
2310 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
2311 }
2312
John Zulauf79f06582021-02-27 18:38:39 -07002313 for (const auto &query_state_pair : local_query_to_state_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002314 queryToStateMap[query_state_pair.first] = query_state_pair.second;
2315 }
2316
John Zulauf79f06582021-02-27 18:38:39 -07002317 for (const auto &function : cb_node->eventUpdates) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002318 function(nullptr, /*do_validate*/ false, &local_event_to_stage_map);
2319 }
2320
John Zulauf79f06582021-02-27 18:38:39 -07002321 for (const auto &eventStagePair : local_event_to_stage_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002322 eventMap[eventStagePair.first]->stageMask = eventStagePair.second;
2323 }
2324 }
2325}
2326
2327void ValidationStateTracker::RecordSubmitWaitSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
2328 uint64_t value, uint64_t next_seq) {
2329 auto semaphore_state = GetSemaphoreState(semaphore);
2330 if (semaphore_state) {
2331 if (semaphore_state->scope == kSyncScopeInternal) {
2332 SEMAPHORE_WAIT wait;
2333 wait.semaphore = semaphore;
2334 wait.type = semaphore_state->type;
2335 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
2336 if (semaphore_state->signaler.first != VK_NULL_HANDLE) {
2337 wait.queue = semaphore_state->signaler.first;
2338 wait.seq = semaphore_state->signaler.second;
2339 submission.waitSemaphores.emplace_back(std::move(wait));
2340 semaphore_state->in_use.fetch_add(1);
2341 }
2342 semaphore_state->signaler.first = VK_NULL_HANDLE;
2343 semaphore_state->signaled = false;
2344 } else if (semaphore_state->payload < value) {
2345 wait.queue = queue;
2346 wait.seq = next_seq;
2347 wait.payload = value;
2348 submission.waitSemaphores.emplace_back(std::move(wait));
2349 semaphore_state->in_use.fetch_add(1);
2350 }
2351 } else {
2352 submission.externalSemaphores.push_back(semaphore);
2353 semaphore_state->in_use.fetch_add(1);
2354 if (semaphore_state->scope == kSyncScopeExternalTemporary) {
2355 semaphore_state->scope = kSyncScopeInternal;
2356 }
2357 }
2358 }
2359}
2360
2361bool ValidationStateTracker::RecordSubmitSignalSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
2362 uint64_t value, uint64_t next_seq) {
2363 bool retire_early = false;
2364 auto semaphore_state = GetSemaphoreState(semaphore);
2365 if (semaphore_state) {
2366 if (semaphore_state->scope == kSyncScopeInternal) {
2367 SEMAPHORE_SIGNAL signal;
2368 signal.semaphore = semaphore;
2369 signal.seq = next_seq;
2370 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
2371 semaphore_state->signaler.first = queue;
2372 semaphore_state->signaler.second = next_seq;
2373 semaphore_state->signaled = true;
2374 } else {
2375 signal.payload = value;
2376 }
2377 semaphore_state->in_use.fetch_add(1);
2378 submission.signalSemaphores.emplace_back(std::move(signal));
2379 } else {
2380 // Retire work up until this submit early, we will not see the wait that corresponds to this signal
2381 retire_early = true;
2382 }
2383 }
2384 return retire_early;
2385}
2386
// State-tracking hook for vkQueueSubmit: records each VkSubmitInfo as a
// CB_SUBMISSION on the queue, capturing wait/signal semaphores (with timeline
// values from any chained VkTimelineSemaphoreSubmitInfo), command buffers, and
// the fence, then retires early where external-scope objects require it.
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    auto queue_state = GetQueueState(queue);

    // Non-zero when the fence is externally scoped and work must retire now
    uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        CB_SUBMISSION submission;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        // Sequence number this submission will occupy once pushed
        const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
        auto *timeline_semaphore_submit = LvlFindInChain<VkTimelineSemaphoreSubmitInfo>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            // Timeline value for this wait, 0 if none was provided at index i
            uint64_t value = 0;
            if (timeline_semaphore_submit && timeline_semaphore_submit->pWaitSemaphoreValues != nullptr &&
                (i < timeline_semaphore_submit->waitSemaphoreValueCount)) {
                value = timeline_semaphore_submit->pWaitSemaphoreValues[i];
            }
            RecordSubmitWaitSemaphore(submission, queue, submit->pWaitSemaphores[i], value, next_seq);
        }

        bool retire_early = false;
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            // Timeline value for this signal, 0 if none was provided at index i
            uint64_t value = 0;
            if (timeline_semaphore_submit && timeline_semaphore_submit->pSignalSemaphoreValues != nullptr &&
                (i < timeline_semaphore_submit->signalSemaphoreValueCount)) {
                value = timeline_semaphore_submit->pSignalSemaphoreValues[i];
            }
            retire_early |= RecordSubmitSignalSemaphore(submission, queue, submit->pSignalSemaphores[i], value, next_seq);
        }
        // An externally-scoped signal means its wait will never be seen: retire now
        if (retire_early) {
            early_retire_seq = std::max(early_retire_seq, next_seq);
        }

        const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            RecordSubmitCommandBuffer(submission, submit->pCommandBuffers[i]);
        }
        // Only the final submission in the batch carries the fence
        submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
        queue_state->submissions.emplace_back(std::move(submission));
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(queue_state, early_retire_seq);
    }
}
2436
2437void ValidationStateTracker::PostCallRecordQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
2438 VkFence fence, VkResult result) {
2439 if (result != VK_SUCCESS) return;
2440 auto queue_state = GetQueueState(queue);
2441
2442 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);
2443
2444 // Now process each individual submit
2445 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
2446 CB_SUBMISSION submission;
2447 const VkSubmitInfo2KHR *submit = &pSubmits[submit_idx];
2448 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
2449 for (uint32_t i = 0; i < submit->waitSemaphoreInfoCount; ++i) {
2450 const auto &sem_info = submit->pWaitSemaphoreInfos[i];
2451 RecordSubmitWaitSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
2452 }
2453 bool retire_early = false;
2454 for (uint32_t i = 0; i < submit->signalSemaphoreInfoCount; ++i) {
2455 const auto &sem_info = submit->pSignalSemaphoreInfos[i];
2456 retire_early |= RecordSubmitSignalSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
2457 }
2458 if (retire_early) {
2459 early_retire_seq = std::max(early_retire_seq, next_seq);
2460 }
2461 const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
2462 submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;
2463
2464 for (uint32_t i = 0; i < submit->commandBufferInfoCount; i++) {
2465 RecordSubmitCommandBuffer(submission, submit->pCommandBufferInfos[i].commandBuffer);
2466 }
2467 submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
2468 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06002469 }
2470
2471 if (early_retire_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002472 RetireWorkOnQueue(queue_state, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06002473 }
2474}
2475
2476void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
2477 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
2478 VkResult result) {
2479 if (VK_SUCCESS == result) {
2480 AddMemObjInfo(device, *pMemory, pAllocateInfo);
2481 }
2482 return;
2483}
2484
// State-tracking hook for vkFreeMemory: unbinds every object bound to the
// allocation (including sparse bindings), invalidates command buffers that
// reference it, and removes the allocation from the tracker's maps.
void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureStateNV(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            // The successor iterator is captured first because erase() invalidates
            // the erased iterator.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    // Release the tracker's fake address-space reservation for this allocation
    fake_memory.Free(mem_info->fake_base_address);
    memObjMap.erase(mem);
}
2532
// State-tracking hook for vkQueueBindSparse: records the sparse memory bindings
// for buffers and images, then records each VkBindSparseInfo as a CB_SUBMISSION
// (semaphores + fence, no command buffers) on the queue. Sparse binds have no
// timeline values here, so waits/signals are recorded with payload 0.
void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    auto queue_state = GetQueueState(queue);

    // Non-zero when the fence is externally scoped and work must retire now
    uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, bindInfoCount);

    for (uint32_t bind_idx = 0; bind_idx < bindInfoCount; ++bind_idx) {
        const VkBindSparseInfo &bind_info = pBindInfo[bind_idx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bind_info.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bind_info.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bind_info.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bind_info.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bind_info.imageBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bind_info.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        CB_SUBMISSION submission;
        // Sequence number this submission will occupy once pushed
        const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
        for (uint32_t i = 0; i < bind_info.waitSemaphoreCount; ++i) {
            RecordSubmitWaitSemaphore(submission, queue, bind_info.pWaitSemaphores[i], 0, next_seq);
        }
        bool retire_early = false;
        for (uint32_t i = 0; i < bind_info.signalSemaphoreCount; ++i) {
            retire_early |= RecordSubmitSignalSemaphore(submission, queue, bind_info.pSignalSemaphores[i], 0, next_seq);
        }
        // Retire work up until this submit early, we will not see the wait that corresponds to this signal
        if (retire_early) {
            early_retire_seq = std::max(early_retire_seq, queue_state->seq + queue_state->submissions.size() + 1);
        }

        // Only the final bind info in the batch carries the fence
        submission.fence = bind_idx == (bindInfoCount - 1) ? fence : VK_NULL_HANDLE;
        queue_state->submissions.emplace_back(std::move(submission));
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(queue_state, early_retire_seq);
    }
}
2588
2589void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
2590 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
2591 VkResult result) {
2592 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002593 auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002594 semaphore_state->signaler.first = VK_NULL_HANDLE;
2595 semaphore_state->signaler.second = 0;
2596 semaphore_state->signaled = false;
2597 semaphore_state->scope = kSyncScopeInternal;
Mike Schuchardt2df08912020-12-15 16:28:09 -08002598 semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002599 semaphore_state->payload = 0;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002600 auto semaphore_type_create_info = LvlFindInChain<VkSemaphoreTypeCreateInfo>(pCreateInfo->pNext);
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002601 if (semaphore_type_create_info) {
2602 semaphore_state->type = semaphore_type_create_info->semaphoreType;
2603 semaphore_state->payload = semaphore_type_create_info->initialValue;
2604 }
locke-lunargd556cc32019-09-17 01:21:23 -06002605 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
2606}
2607
Mike Schuchardt2df08912020-12-15 16:28:09 -08002608void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBits handle_type,
2609 VkSemaphoreImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06002610 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
2611 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002612 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06002613 sema_node->scope == kSyncScopeInternal) {
2614 sema_node->scope = kSyncScopeExternalTemporary;
2615 } else {
2616 sema_node->scope = kSyncScopeExternalPermanent;
2617 }
2618 }
2619}
2620
Mike Schuchardt2df08912020-12-15 16:28:09 -08002621void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo,
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002622 VkResult result) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002623 auto *semaphore_state = GetSemaphoreState(pSignalInfo->semaphore);
2624 semaphore_state->payload = pSignalInfo->value;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002625}
2626
locke-lunargd556cc32019-09-17 01:21:23 -06002627void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
2628 auto mem_info = GetDevMemState(mem);
2629 if (mem_info) {
2630 mem_info->mapped_range.offset = offset;
2631 mem_info->mapped_range.size = size;
2632 mem_info->p_driver_data = *ppData;
2633 }
2634}
2635
2636void ValidationStateTracker::RetireFence(VkFence fence) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002637 auto fence_state = GetFenceState(fence);
2638 if (fence_state && fence_state->scope == kSyncScopeInternal) {
2639 if (fence_state->signaler.first != VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06002640 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002641 RetireWorkOnQueue(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002642 } else {
2643 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
2644 // the fence as retired.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002645 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06002646 }
2647 }
2648}
2649
2650void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2651 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2652 if (VK_SUCCESS != result) return;
2653
2654 // When we know that all fences are complete we can clean/remove their CBs
2655 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2656 for (uint32_t i = 0; i < fenceCount; i++) {
2657 RetireFence(pFences[i]);
2658 }
2659 }
2660 // NOTE : Alternate case not handled here is when some fences have completed. In
2661 // this case for app to guarantee which fences completed it will have to call
2662 // vkGetFenceStatus() at which point we'll clean/remove their CBs if complete.
2663}
2664
// Retire queue work proven complete by a timeline semaphore reaching (at least)
// until_payload. For every known queue, find the highest submission sequence
// number that signals |semaphore| with a payload <= until_payload, then retire
// all submissions on that queue up to and including it.
void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
    auto semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state) {
        for (auto &pair : queueMap) {
            QUEUE_STATE &queue_state = pair.second;
            uint64_t max_seq = 0;  // highest retirable sequence number found on this queue (0 = none)
            for (const auto &submission : queue_state.submissions) {
                for (const auto &signal_semaphore : submission.signalSemaphores) {
                    // A signal at or below the observed payload implies this submission finished.
                    if (signal_semaphore.semaphore == semaphore && signal_semaphore.payload <= until_payload) {
                        if (signal_semaphore.seq > max_seq) {
                            max_seq = signal_semaphore.seq;
                        }
                    }
                }
            }
            // Scan completes before retiring, since RetireWorkOnQueue mutates queue_state.submissions.
            if (max_seq) {
                RetireWorkOnQueue(&queue_state, max_seq);
            }
        }
    }
}
2686
John Zulauff89de662020-04-13 18:57:34 -06002687void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2688 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002689 if (VK_SUCCESS != result) return;
2690
2691 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2692 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2693 }
2694}
2695
// Core (Vulkan 1.2) vkWaitSemaphores entry point -- forwards to the shared recorder.
void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                          VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}
2700
// VK_KHR_timeline_semaphore vkWaitSemaphoresKHR entry point -- forwards to the shared recorder.
void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
                                                             uint64_t timeout, VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}
2705
Adrian Coca Lorentec7d76102020-09-28 13:58:16 +02002706void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2707 VkResult result) {
2708 if (VK_SUCCESS != result) return;
2709
2710 RetireTimelineSemaphore(semaphore, *pValue);
2711}
2712
// Core (Vulkan 1.2) vkGetSemaphoreCounterValue entry point -- forwards to the shared recorder.
void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                                    VkResult result) {
    RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
}
// VK_KHR_timeline_semaphore vkGetSemaphoreCounterValueKHR entry point -- forwards to the shared recorder.
void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                                       VkResult result) {
    RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
}
2721
locke-lunargd556cc32019-09-17 01:21:23 -06002722void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2723 if (VK_SUCCESS != result) return;
2724 RetireFence(fence);
2725}
2726
2727void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
2728 // Add queue to tracking set only if it is new
2729 auto queue_is_new = queues.emplace(queue);
2730 if (queue_is_new.second == true) {
2731 QUEUE_STATE *queue_state = &queueMap[queue];
2732 queue_state->queue = queue;
2733 queue_state->queueFamilyIndex = queue_family_index;
2734 queue_state->seq = 0;
2735 }
2736}
2737
// vkGetDeviceQueue: track the returned queue handle.
void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}
2742
// vkGetDeviceQueue2: track the returned queue handle (family taken from the info struct).
void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}
2746
2747void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2748 if (VK_SUCCESS != result) return;
2749 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002750 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002751}
2752
2753void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2754 if (VK_SUCCESS != result) return;
2755 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002756 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002757 }
2758}
2759
2760void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2761 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002762 auto fence_state = GetFenceState(fence);
2763 fence_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002764 fenceMap.erase(fence);
2765}
2766
2767void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2768 const VkAllocationCallbacks *pAllocator) {
2769 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002770 auto semaphore_state = GetSemaphoreState(semaphore);
2771 semaphore_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002772 semaphoreMap.erase(semaphore);
2773}
2774
2775void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2776 if (!event) return;
John Zulauf48057322020-12-02 11:59:31 -07002777 EVENT_STATE *event_state = Get<EVENT_STATE>(event);
locke-lunargd556cc32019-09-17 01:21:23 -06002778 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
2779 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
John Zulauf48057322020-12-02 11:59:31 -07002780 event_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002781 eventMap.erase(event);
2782}
2783
2784void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2785 const VkAllocationCallbacks *pAllocator) {
2786 if (!queryPool) return;
2787 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
2788 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
2789 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002790 qp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002791 queryPoolMap.erase(queryPool);
2792}
2793
// Record that |image_state| is bound into |mem_info|'s memory.
// NOTE(review): mem_offset is currently unused -- only the image<->memory
// association is tracked, not the offset/range within the allocation.
void ValidationStateTracker::InsertImageMemoryRange(IMAGE_STATE *image_state, DEVICE_MEMORY_STATE *mem_info,
                                                    VkDeviceSize mem_offset) {
    mem_info->bound_images.insert(image_state);
}
2798
// Remove the image<->memory association recorded by InsertImageMemoryRange.
void ValidationStateTracker::RemoveImageMemoryRange(IMAGE_STATE *image_state, DEVICE_MEMORY_STATE *mem_info) {
    mem_info->bound_images.erase(image_state);
}
2802
locke-lunargd556cc32019-09-17 01:21:23 -06002803void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2804 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2805 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002806 // Track objects tied to memory
2807 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
2808 }
2809}
2810
// vkBindBufferMemory: record the binding only when the call succeeded.
void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}
2816
2817void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002818 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002819 for (uint32_t i = 0; i < bindInfoCount; i++) {
2820 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2821 }
2822}
2823
2824void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002825 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002826 for (uint32_t i = 0; i < bindInfoCount; i++) {
2827 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2828 }
2829}
2830
Spencer Fricke6c127102020-04-16 06:25:20 -07002831void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002832 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2833 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002834 buffer_state->memory_requirements_checked = true;
2835 }
2836}
2837
// vkGetBufferMemoryRequirements: mark the buffer's requirements as queried.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer);
}
2842
// vkGetBufferMemoryRequirements2: mark the buffer's requirements as queried.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2 *pInfo,
                                                                        VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}
2848
// vkGetBufferMemoryRequirements2KHR: mark the buffer's requirements as queried.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2 *pInfo,
                                                                           VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}
2854
Spencer Fricke6c127102020-04-16 06:25:20 -07002855void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002856 const VkImagePlaneMemoryRequirementsInfo *plane_info =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002857 (pInfo == nullptr) ? nullptr : LvlFindInChain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002858 IMAGE_STATE *image_state = GetImageState(image);
2859 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002860 if (plane_info != nullptr) {
2861 // Multi-plane image
2862 image_state->memory_requirements_checked = false; // Each image plane needs to be checked itself
2863 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
2864 image_state->plane0_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002865 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
2866 image_state->plane1_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002867 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
2868 image_state->plane2_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002869 }
2870 } else {
2871 // Single Plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002872 image_state->memory_requirements_checked = true;
2873 }
locke-lunargd556cc32019-09-17 01:21:23 -06002874 }
2875}
2876
// vkGetImageMemoryRequirements: no pNext chain available, so no plane info.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(image, nullptr);
}
2881
// vkGetImageMemoryRequirements2: pass the info struct so plane info can be inspected.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}
2886
// vkGetImageMemoryRequirements2KHR: pass the info struct so plane info can be inspected.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}
2892
2893static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2894 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2895 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2896 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2897 image_state->sparse_metadata_required = true;
2898 }
2899}
2900
2901void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2902 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2903 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2904 auto image_state = GetImageState(image);
2905 image_state->get_sparse_reqs_called = true;
2906 if (!pSparseMemoryRequirements) return;
2907 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2908 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2909 }
2910}
2911
2912void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002913 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2914 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002915 auto image_state = GetImageState(pInfo->image);
2916 image_state->get_sparse_reqs_called = true;
2917 if (!pSparseMemoryRequirements) return;
2918 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2919 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2920 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2921 }
2922}
2923
2924void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002925 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2926 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002927 auto image_state = GetImageState(pInfo->image);
2928 image_state->get_sparse_reqs_called = true;
2929 if (!pSparseMemoryRequirements) return;
2930 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2931 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2932 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2933 }
2934}
2935
2936void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2937 const VkAllocationCallbacks *pAllocator) {
2938 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002939 auto shader_module_state = GetShaderModuleState(shaderModule);
2940 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002941 shaderModuleMap.erase(shaderModule);
2942}
2943
2944void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2945 const VkAllocationCallbacks *pAllocator) {
2946 if (!pipeline) return;
2947 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
2948 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
2949 // Any bound cmd buffers are now invalid
2950 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002951 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002952 pipelineMap.erase(pipeline);
2953}
2954
2955void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2956 const VkAllocationCallbacks *pAllocator) {
2957 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002958 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
2959 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002960 pipelineLayoutMap.erase(pipelineLayout);
2961}
2962
2963void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2964 const VkAllocationCallbacks *pAllocator) {
2965 if (!sampler) return;
2966 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
2967 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
2968 // Any bound cmd buffers are now invalid
2969 if (sampler_state) {
2970 InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
Yuly Novikov424cdd52020-05-26 16:45:12 -04002971
2972 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2973 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
2974 custom_border_color_sampler_count--;
2975 }
2976
2977 sampler_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002978 }
2979 samplerMap.erase(sampler);
2980}
2981
2982void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2983 const VkAllocationCallbacks *pAllocator) {
2984 if (!descriptorSetLayout) return;
2985 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2986 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002987 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002988 descriptorSetLayoutMap.erase(layout_it);
2989 }
2990}
2991
2992void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2993 const VkAllocationCallbacks *pAllocator) {
2994 if (!descriptorPool) return;
2995 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
2996 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
2997 if (desc_pool_state) {
2998 // Any bound cmd buffers are now invalid
2999 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
3000 // Free sets that were in this pool
John Zulauf79f06582021-02-27 18:38:39 -07003001 for (auto *ds : desc_pool_state->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06003002 FreeDescriptorSet(ds);
3003 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003004 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003005 descriptorPoolMap.erase(descriptorPool);
3006 }
3007}
3008
// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        // Allow any derived class to clean up command buffer state
        if (command_buffer_free_callback) {
            (*command_buffer_free_callback)(command_buffers[i]);
        }

        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map; mark destroyed first so outstanding
            // shared references can detect the stale object.
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}
3034
// vkFreeCommandBuffers: tear down tracking state for each freed command buffer.
void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
                                                             uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
    auto pool = GetCommandPoolState(commandPool);
    FreeCommandBufferStates(pool, commandBufferCount, pCommandBuffers);
}
3040
3041void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
3042 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
3043 VkResult result) {
3044 if (VK_SUCCESS != result) return;
sfricke-samsungc1543372020-08-18 22:37:27 -07003045 VkCommandPool command_pool = *pCommandPool;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003046 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
sfricke-samsungc1543372020-08-18 22:37:27 -07003047 cmd_pool_state->commandPool = command_pool;
locke-lunargd556cc32019-09-17 01:21:23 -06003048 cmd_pool_state->createFlags = pCreateInfo->flags;
3049 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07003050 cmd_pool_state->unprotected = ((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) == 0);
sfricke-samsungc1543372020-08-18 22:37:27 -07003051 commandPoolMap[command_pool] = std::move(cmd_pool_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003052}
3053
// Create tracking state for a newly created query pool. Performance query
// pools additionally record which counter scopes are used and how many passes
// the query requires; every query slot starts in the UNKNOWN state.
void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
    query_pool_state->createInfo = *pCreateInfo;
    query_pool_state->pool = *pQueryPool;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        // NOTE(review): 'perf' is dereferenced without a null check -- this
        // relies on the valid-usage rule that performance query pools carry a
        // VkQueryPoolPerformanceCreateInfoKHR in their pNext chain.
        const auto *perf = LvlFindInChain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
        query_pool_state->perf_counter_index_count = perf->counterIndexCount;

        // Record which scopes the selected counters touch so command-buffer /
        // render-pass scope rules can be validated later.
        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
            const auto &counter = counters.counters[perf->pCounterIndices[i]];
            switch (counter.scope) {
                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
                    query_pool_state->has_perf_scope_command_buffer = true;
                    break;
                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
                    query_pool_state->has_perf_scope_render_pass = true;
                    break;
                default:
                    break;
            }
        }

        // Ask the driver how many submission passes this counter set needs.
        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
                                                                      &query_pool_state->n_performance_passes);
    }

    queryPoolMap[*pQueryPool] = std::move(query_pool_state);

    // Initialize every query slot in the pool to the UNKNOWN state.
    QueryObject query_obj{*pQueryPool, 0u};
    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
        query_obj.query = i;
        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
    }
}
3092
3093void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
3094 const VkAllocationCallbacks *pAllocator) {
3095 if (!commandPool) return;
3096 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
3097 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
3098 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
3099 if (cp_state) {
3100 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
3101 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
3102 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003103 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003104 commandPoolMap.erase(commandPool);
3105 }
3106}
3107
3108void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
3109 VkCommandPoolResetFlags flags, VkResult result) {
3110 if (VK_SUCCESS != result) return;
3111 // Reset all of the CBs allocated from this pool
3112 auto command_pool_state = GetCommandPoolState(commandPool);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003113 for (auto cmd_buffer : command_pool_state->commandBuffers) {
3114 ResetCommandBufferState(cmd_buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06003115 }
3116}
3117
3118void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
3119 VkResult result) {
3120 for (uint32_t i = 0; i < fenceCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003121 auto fence_state = GetFenceState(pFences[i]);
3122 if (fence_state) {
3123 if (fence_state->scope == kSyncScopeInternal) {
3124 fence_state->state = FENCE_UNSIGNALED;
3125 } else if (fence_state->scope == kSyncScopeExternalTemporary) {
3126 fence_state->scope = kSyncScopeInternal;
locke-lunargd556cc32019-09-17 01:21:23 -06003127 }
3128 }
3129 }
3130}
3131
// For given cb_nodes, invalidate them and track object causing invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
// can also unlink objects from command buffers.
void ValidationStateTracker::InvalidateCommandBuffers(BASE_NODE::BindingsType &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        // A still-recording CB becomes "invalid, incomplete"; a finished one
        // becomes "invalid, complete" -- later checks distinguish the two.
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        // Remember which object caused the invalidation, for error reporting.
        cb_node->broken_bindings.push_back(obj);

        // if secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            // The mapped value is |obj|'s slot index in the CB's object_bindings;
            // clear that slot so the CB no longer references the object.
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        // All back-references cleared above; drop the forward references too.
        cb_nodes.clear();
    }
}
3161
Jeremy Gebbencbf22862021-03-03 12:01:22 -07003162void ValidationStateTracker::InvalidateLinkedCommandBuffers(layer_data::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
Jeff Bolzadbfa852019-10-04 13:53:30 -05003163 const VulkanTypedHandle &obj) {
John Zulauf79f06582021-02-27 18:38:39 -07003164 for (auto *cb_node : cb_nodes) {
locke-lunargd556cc32019-09-17 01:21:23 -06003165 if (cb_node->state == CB_RECORDING) {
3166 cb_node->state = CB_INVALID_INCOMPLETE;
3167 } else if (cb_node->state == CB_RECORDED) {
3168 cb_node->state = CB_INVALID_COMPLETE;
3169 }
3170 cb_node->broken_bindings.push_back(obj);
3171
3172 // if secondary, then propagate the invalidation to the primaries that will call us.
3173 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003174 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06003175 }
3176 }
3177}
3178
3179void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
3180 const VkAllocationCallbacks *pAllocator) {
3181 if (!framebuffer) return;
3182 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
3183 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
3184 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003185 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003186 frameBufferMap.erase(framebuffer);
3187}
3188
3189void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
3190 const VkAllocationCallbacks *pAllocator) {
3191 if (!renderPass) return;
3192 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
3193 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
3194 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003195 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003196 renderPassMap.erase(renderPass);
3197}
3198
3199void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
3200 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
3201 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003202 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003203 fence_state->fence = *pFence;
3204 fence_state->createInfo = *pCreateInfo;
3205 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
3206 fenceMap[*pFence] = std::move(fence_state);
3207}
3208
3209bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3210 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3211 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003212 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003213 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
3214 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
3215 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
3216 cgpl_state->pipe_state.reserve(count);
3217 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003218 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05003219 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003220 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003221 }
3222 return false;
3223}
3224
3225void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3226 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3227 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
3228 VkResult result, void *cgpl_state_data) {
3229 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
3230 // This API may create pipelines regardless of the return value
3231 for (uint32_t i = 0; i < count; i++) {
3232 if (pPipelines[i] != VK_NULL_HANDLE) {
3233 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3234 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
3235 }
3236 }
3237 cgpl_state->pipe_state.clear();
3238}
3239
3240bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3241 const VkComputePipelineCreateInfo *pCreateInfos,
3242 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003243 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003244 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
3245 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
3246 ccpl_state->pipe_state.reserve(count);
3247 for (uint32_t i = 0; i < count; i++) {
3248 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003249 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06003250 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003251 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003252 }
3253 return false;
3254}
3255
3256void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3257 const VkComputePipelineCreateInfo *pCreateInfos,
3258 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
3259 VkResult result, void *ccpl_state_data) {
3260 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
3261
3262 // This API may create pipelines regardless of the return value
3263 for (uint32_t i = 0; i < count; i++) {
3264 if (pPipelines[i] != VK_NULL_HANDLE) {
3265 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3266 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
3267 }
3268 }
3269 ccpl_state->pipe_state.clear();
3270}
3271
3272bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
3273 uint32_t count,
3274 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
3275 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003276 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003277 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
3278 crtpl_state->pipe_state.reserve(count);
3279 for (uint32_t i = 0; i < count; i++) {
3280 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003281 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003282 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003283 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003284 }
3285 return false;
3286}
3287
3288void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
3289 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
3290 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
3291 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
3292 // This API may create pipelines regardless of the return value
3293 for (uint32_t i = 0; i < count; i++) {
3294 if (pPipelines[i] != VK_NULL_HANDLE) {
3295 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3296 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3297 }
3298 }
3299 crtpl_state->pipe_state.clear();
3300}
3301
sourav parmarcd5fb182020-07-17 12:58:44 -07003302bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
3303 VkPipelineCache pipelineCache, uint32_t count,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003304 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3305 const VkAllocationCallbacks *pAllocator,
3306 VkPipeline *pPipelines, void *crtpl_state_data) const {
3307 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3308 crtpl_state->pipe_state.reserve(count);
3309 for (uint32_t i = 0; i < count; i++) {
3310 // Create and initialize internal tracking data structure
3311 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
3312 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
3313 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
3314 }
3315 return false;
3316}
3317
sourav parmarcd5fb182020-07-17 12:58:44 -07003318void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
3319 VkPipelineCache pipelineCache, uint32_t count,
3320 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3321 const VkAllocationCallbacks *pAllocator,
3322 VkPipeline *pPipelines, VkResult result,
3323 void *crtpl_state_data) {
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003324 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3325 // This API may create pipelines regardless of the return value
3326 for (uint32_t i = 0; i < count; i++) {
3327 if (pPipelines[i] != VK_NULL_HANDLE) {
3328 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3329 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3330 }
3331 }
3332 crtpl_state->pipe_state.clear();
3333}
3334
locke-lunargd556cc32019-09-17 01:21:23 -06003335void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
3336 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
3337 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003338 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003339 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
3340 pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
Tony-LunarG7337b312020-04-15 16:40:25 -06003341 custom_border_color_sampler_count++;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003342 }
locke-lunargd556cc32019-09-17 01:21:23 -06003343}
3344
3345void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
3346 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
3347 const VkAllocationCallbacks *pAllocator,
3348 VkDescriptorSetLayout *pSetLayout, VkResult result) {
3349 if (VK_SUCCESS != result) return;
3350 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
3351}
3352
3353// For repeatable sorting, not very useful for "memory in range" search
3354struct PushConstantRangeCompare {
3355 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
3356 if (lhs->offset == rhs->offset) {
3357 if (lhs->size == rhs->size) {
3358 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
3359 return lhs->stageFlags < rhs->stageFlags;
3360 }
3361 // If the offsets are the same then sorting by the end of range is useful for validation
3362 return lhs->size < rhs->size;
3363 }
3364 return lhs->offset < rhs->offset;
3365 }
3366};
3367
// Process-wide dictionary interning canonical push-constant-range sets
static PushConstantRangesDict push_constant_ranges_dict;
3369
3370PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
3371 if (!info->pPushConstantRanges) {
3372 // Hand back the empty entry (creating as needed)...
3373 return push_constant_ranges_dict.look_up(PushConstantRanges());
3374 }
3375
3376 // Sort the input ranges to ensure equivalent ranges map to the same id
3377 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
3378 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
3379 sorted.insert(info->pPushConstantRanges + i);
3380 }
3381
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07003382 PushConstantRanges ranges;
3383 ranges.reserve(sorted.size());
John Zulauf79f06582021-02-27 18:38:39 -07003384 for (const auto *range : sorted) {
locke-lunargd556cc32019-09-17 01:21:23 -06003385 ranges.emplace_back(*range);
3386 }
3387 return push_constant_ranges_dict.look_up(std::move(ranges));
3388}
3389
// Dictionary of canonical form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;

// Intern one "compatible for set N" record (set index + push-constant ranges id
// + full set-layout-list id) so layout compatibility checks can compare ids.
static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}
3400
3401void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
3402 const VkAllocationCallbacks *pAllocator,
3403 VkPipelineLayout *pPipelineLayout, VkResult result) {
3404 if (VK_SUCCESS != result) return;
3405
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003406 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003407 pipeline_layout_state->layout = *pPipelineLayout;
3408 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
3409 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
3410 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05003411 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003412 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
3413 }
3414
3415 // Get canonical form IDs for the "compatible for set" contents
3416 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
3417 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
3418 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
3419
3420 // Create table of "compatible for set N" cannonical forms for trivial accept validation
3421 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
3422 pipeline_layout_state->compat_for_set.emplace_back(
3423 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
3424 }
3425 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
3426}
3427
3428void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
3429 const VkAllocationCallbacks *pAllocator,
3430 VkDescriptorPool *pDescriptorPool, VkResult result) {
3431 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003432 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003433}
3434
3435void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3436 VkDescriptorPoolResetFlags flags, VkResult result) {
3437 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003438 DESCRIPTOR_POOL_STATE *pool = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06003439 // TODO: validate flags
3440 // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
John Zulauf79f06582021-02-27 18:38:39 -07003441 for (auto *ds : pool->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06003442 FreeDescriptorSet(ds);
3443 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003444 pool->sets.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06003445 // Reset available count for each type and available sets for this pool
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003446 for (auto it = pool->availableDescriptorTypeCount.begin(); it != pool->availableDescriptorTypeCount.end(); ++it) {
3447 pool->availableDescriptorTypeCount[it->first] = pool->maxDescriptorTypeCount[it->first];
locke-lunargd556cc32019-09-17 01:21:23 -06003448 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003449 pool->availableSets = pool->maxSets;
locke-lunargd556cc32019-09-17 01:21:23 -06003450}
3451
3452bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
3453 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003454 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003455 // Always update common data
3456 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3457 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3458 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
3459
3460 return false;
3461}
3462
3463// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
3464void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3465 VkDescriptorSet *pDescriptorSets, VkResult result,
3466 void *ads_state_data) {
3467 if (VK_SUCCESS != result) return;
3468 // All the updates are contained in a single cvdescriptorset function
3469 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3470 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3471 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
3472}
3473
3474void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
3475 const VkDescriptorSet *pDescriptorSets) {
3476 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
3477 // Update available descriptor sets in pool
3478 pool_state->availableSets += count;
3479
3480 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
3481 for (uint32_t i = 0; i < count; ++i) {
3482 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
3483 auto descriptor_set = setMap[pDescriptorSets[i]].get();
3484 uint32_t type_index = 0, descriptor_count = 0;
3485 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
3486 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
3487 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
3488 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
3489 }
3490 FreeDescriptorSet(descriptor_set);
3491 pool_state->sets.erase(descriptor_set);
3492 }
3493 }
3494}
3495
// Thin forwarding shim: all write/copy descriptor bookkeeping is implemented
// by cvdescriptorset::PerformUpdateDescriptorSets.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}
3503
3504void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
3505 VkCommandBuffer *pCommandBuffer, VkResult result) {
3506 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003507 auto pool = GetCommandPoolShared(pCreateInfo->commandPool);
3508 if (pool) {
locke-lunargd556cc32019-09-17 01:21:23 -06003509 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
3510 // Add command buffer to its commandPool map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003511 pool->commandBuffers.insert(pCommandBuffer[i]);
3512 auto cb_state = std::make_shared<CMD_BUFFER_STATE>();
3513 cb_state->createInfo = *pCreateInfo;
3514 cb_state->command_pool = pool;
3515 cb_state->unprotected = pool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06003516 // Add command buffer to map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003517 commandBufferMap[pCommandBuffer[i]] = std::move(cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003518 ResetCommandBufferState(pCommandBuffer[i]);
3519 }
3520 }
3521}
3522
3523// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
3524void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003525 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003526 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07003527 // If imageless fb, skip fb binding
Mike Schuchardt2df08912020-12-15 16:28:09 -08003528 if (!fb_state || fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003529 const uint32_t attachment_count = fb_state->createInfo.attachmentCount;
3530 for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
locke-lunargfc78e932020-11-19 17:06:24 -07003531 auto view_state = GetActiveAttachmentImageViewState(cb_state, attachment);
locke-lunargd556cc32019-09-17 01:21:23 -06003532 if (view_state) {
3533 AddCommandBufferBindingImageView(cb_state, view_state);
3534 }
3535 }
3536}
3537
locke-lunargfc78e932020-11-19 17:06:24 -07003538void UpdateSubpassAttachments(const safe_VkSubpassDescription2 &subpass, std::vector<SUBPASS_INFO> &subpasses) {
3539 for (uint32_t index = 0; index < subpass.inputAttachmentCount; ++index) {
3540 const uint32_t attachment_index = subpass.pInputAttachments[index].attachment;
3541 if (attachment_index != VK_ATTACHMENT_UNUSED) {
3542 subpasses[attachment_index].used = true;
3543 subpasses[attachment_index].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
3544 subpasses[attachment_index].layout = subpass.pInputAttachments[index].layout;
3545 }
3546 }
3547
3548 for (uint32_t index = 0; index < subpass.colorAttachmentCount; ++index) {
3549 const uint32_t attachment_index = subpass.pColorAttachments[index].attachment;
3550 if (attachment_index != VK_ATTACHMENT_UNUSED) {
3551 subpasses[attachment_index].used = true;
3552 subpasses[attachment_index].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
3553 subpasses[attachment_index].layout = subpass.pColorAttachments[index].layout;
3554 }
3555 if (subpass.pResolveAttachments) {
3556 const uint32_t attachment_index2 = subpass.pResolveAttachments[index].attachment;
3557 if (attachment_index2 != VK_ATTACHMENT_UNUSED) {
3558 subpasses[attachment_index2].used = true;
3559 subpasses[attachment_index2].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
3560 subpasses[attachment_index2].layout = subpass.pResolveAttachments[index].layout;
3561 }
3562 }
3563 }
3564
3565 if (subpass.pDepthStencilAttachment) {
3566 const uint32_t attachment_index = subpass.pDepthStencilAttachment->attachment;
3567 if (attachment_index != VK_ATTACHMENT_UNUSED) {
3568 subpasses[attachment_index].used = true;
3569 subpasses[attachment_index].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
3570 subpasses[attachment_index].layout = subpass.pDepthStencilAttachment->layout;
3571 }
3572 }
3573}
3574
// Refresh cb_state.active_attachments with the IMAGE_VIEW_STATE pointers for
// the current render-pass instance. For an imageless framebuffer the views
// come from the VkRenderPassAttachmentBeginInfo chained to pRenderPassBegin;
// otherwise they come from the framebuffer's own attachment list. Each view's
// shared_ptr is also inserted into cb_state.attachments_view_states so the raw
// pointers stored in active_attachments stay valid for this recording.
void UpdateAttachmentsView(ValidationStateTracker &tracker, CMD_BUFFER_STATE &cb_state, const FRAMEBUFFER_STATE &framebuffer,
                           const VkRenderPassBeginInfo *pRenderPassBegin) {
    auto &attachments = *(cb_state.active_attachments.get());
    const bool imageless = (framebuffer.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) ? true : false;
    const VkRenderPassAttachmentBeginInfo *attachment_info_struct = nullptr;
    // pRenderPassBegin may be null (caller passes nullptr outside an explicit begin)
    if (pRenderPassBegin) attachment_info_struct = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);

    for (uint32_t i = 0; i < attachments.size(); ++i) {
        if (imageless) {
            // Imageless: slot i is only filled when the begin-info supplies a view for it
            if (attachment_info_struct && i < attachment_info_struct->attachmentCount) {
                // insert() hands back the (possibly pre-existing) owning entry;
                // store the raw pointer it keeps alive
                auto res = cb_state.attachments_view_states.insert(
                    tracker.GetShared<IMAGE_VIEW_STATE>(attachment_info_struct->pAttachments[i]));
                attachments[i] = res.first->get();
            }
        } else {
            auto res = cb_state.attachments_view_states.insert(framebuffer.attachments_view_state[i]);
            attachments[i] = res.first->get();
        }
    }
}
3595
// Move the command buffer into the RECORDING state, performing the implicit
// reset when it was previously recorded/invalid, and capture begin-time state:
// inherited render pass/framebuffer for secondaries, the device-group mask,
// and the profiling-lock status.
void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;

    // vkBeginCommandBuffer on a previously-recorded buffer is an implicit reset
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo && (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
        // Copy the inheritance info so our stored beginInfo does not point at
        // caller-owned memory
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command-buffer and inheriting. Update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;

            // The inherited framebuffer is optional; when absent the attachment
            // state below is left unset
            if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
                cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
                cb_state->active_subpasses = nullptr;
                cb_state->active_attachments = nullptr;

                if (cb_state->activeFramebuffer) {
                    cb_state->framebuffers.insert(cb_state->activeFramebuffer);

                    // Set cb_state->active_subpasses
                    cb_state->active_subpasses =
                        std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
                    const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
                    UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);

                    // Set cb_state->active_attachments & cb_state->attachments_view_states
                    cb_state->active_attachments =
                        std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
                    UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, nullptr);

                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, cb_state->activeFramebuffer.get());
                }
            }
        }
    }

    // Device-group mask: explicit from the chained struct, otherwise all devices
    auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    // Snapshot whether VK_KHR_performance_query profiling lock is held at begin time
    cb_state->performance_lock_acquired = performance_lock_acquired;
}
3651
3652void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
3653 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3654 if (!cb_state) return;
3655 // Cached validation is specific to a specific recording of a specific command buffer.
John Zulauf79f06582021-02-27 18:38:39 -07003656 for (auto *descriptor_set : cb_state->validated_descriptor_sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06003657 descriptor_set->ClearCachedValidation(cb_state);
3658 }
3659 cb_state->validated_descriptor_sets.clear();
3660 if (VK_SUCCESS == result) {
3661 cb_state->state = CB_RECORDED;
3662 }
3663}
3664
3665void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
3666 VkResult result) {
3667 if (VK_SUCCESS == result) {
3668 ResetCommandBufferState(commandBuffer);
3669 }
3670}
3671
3672CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
3673 // initially assume everything is static state
3674 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
3675
3676 if (ds) {
3677 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
locke-lunarg4189aa22020-10-21 00:23:48 -06003678 flags &= ~ConvertToCBStatusFlagBits(ds->pDynamicStates[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003679 }
3680 }
locke-lunargd556cc32019-09-17 01:21:23 -06003681 return flags;
3682}
3683
3684// Validation cache:
3685// CV is the bottommost implementor of this extension. Don't pass calls down.
3686// utility function to set collective state for pipeline
3687void SetPipelineState(PIPELINE_STATE *pPipe) {
3688 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
3689 if (pPipe->graphicsPipelineCI.pColorBlendState) {
3690 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
3691 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
3692 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3693 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3694 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3695 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3696 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3697 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3698 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3699 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
3700 pPipe->blendConstantsEnabled = true;
3701 }
3702 }
3703 }
3704 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07003705 // Check if sample location is enabled
3706 if (pPipe->graphicsPipelineCI.pMultisampleState) {
3707 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07003708 LvlFindInChain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
sfricke-samsung8f658d42020-05-03 20:12:24 -07003709 if (sample_location_state != nullptr) {
3710 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
3711 }
3712 }
locke-lunargd556cc32019-09-17 01:21:23 -06003713}
3714
// Refresh the cached sampler descriptor pointers that image accesses depend on.
// For every (set, binding) the bound pipeline declares, and every sampler an image
// access uses, re-resolve the sampler's descriptor from the currently bound
// descriptor sets. Entries whose set index is out of range, or whose set slot has
// no bound descriptor set, are left untouched.
void UpdateSamplerDescriptorsUsedByImage(LAST_BOUND_STATE &last_bound_state) {
    // Nothing to resolve without a bound pipeline or any bound descriptor sets.
    if (!last_bound_state.pipeline_state) return;
    if (last_bound_state.per_set.empty()) return;

    for (auto &slot : last_bound_state.pipeline_state->active_slots) {
        for (auto &req : slot.second) {
            for (auto &samplers : req.second.samplers_used_by_image) {
                for (auto &sampler : samplers) {
                    // sampler.first.sampler_slot is (set index, binding); sampler.second caches
                    // the resolved descriptor pointer (cleared at pipeline bind time).
                    if (sampler.first.sampler_slot.first < last_bound_state.per_set.size() &&
                        last_bound_state.per_set[sampler.first.sampler_slot.first].bound_descriptor_set) {
                        sampler.second = last_bound_state.per_set[sampler.first.sampler_slot.first]
                                             .bound_descriptor_set->GetDescriptorFromBinding(sampler.first.sampler_slot.second,
                                                                                             sampler.first.sampler_index);
                    }
                }
            }
        }
    }
}
3734
// Track the effects of vkCmdBindPipeline: update the command buffer's static/dynamic
// state masks (graphics only), record the binding for invalidation tracking, and
// re-resolve the sampler descriptors used by image accesses for the new pipeline.
void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
        // Drop status bits owned by the previous pipeline's static state, then mark the
        // new pipeline's static state as "set" (static state needs no vkCmdSet* call).
        cb_state->status &= ~cb_state->static_status;
        cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
        cb_state->status |= cb_state->static_status;
        // Dynamic state is exactly the complement of static state.
        cb_state->dynamic_status = CBSTATUS_ALL_STATE_SET & (~cb_state->static_status);
    }
    const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
    cb_state->lastBound[lv_bind_point].pipeline_state = pipe_state;
    SetPipelineState(pipe_state);
    // Destroying the pipeline must invalidate this command buffer.
    AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);

    // Invalidate all cached sampler descriptor pointers before re-resolving them
    // against the currently bound descriptor sets.
    for (auto &slot : pipe_state->active_slots) {
        for (auto &req : slot.second) {
            for (auto &sampler : req.second.samplers_used_by_image) {
                for (auto &des : sampler) {
                    des.second = nullptr;
                }
            }
        }
    }
    UpdateSamplerDescriptorsUsedByImage(cb_state->lastBound[lv_bind_point]);
}
3763
3764void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3765 uint32_t viewportCount, const VkViewport *pViewports) {
3766 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3767 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
3768 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003769 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003770}
3771
3772void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3773 uint32_t exclusiveScissorCount,
3774 const VkRect2D *pExclusiveScissors) {
3775 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3776 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3777 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3778 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003779 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003780}
3781
3782void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3783 VkImageLayout imageLayout) {
3784 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3785
3786 if (imageView != VK_NULL_HANDLE) {
3787 auto view_state = GetImageViewState(imageView);
3788 AddCommandBufferBindingImageView(cb_state, view_state);
3789 }
3790}
3791
3792void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3793 uint32_t viewportCount,
3794 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3795 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3796 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3797 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3798 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003799 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003800}
3801
3802void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3803 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3804 const VkAllocationCallbacks *pAllocator,
3805 VkAccelerationStructureNV *pAccelerationStructure,
3806 VkResult result) {
3807 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003808 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003809
3810 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003811 auto as_memory_requirements_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003812 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
3813 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3814 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3815
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003816 auto scratch_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003817 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
3818 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3819 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3820 &as_state->build_scratch_memory_requirements);
3821
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003822 auto update_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003823 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
3824 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3825 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3826 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003827 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003828 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3829}
3830
Jeff Bolz95176d02020-04-01 00:36:16 -05003831void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3832 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3833 const VkAllocationCallbacks *pAllocator,
3834 VkAccelerationStructureKHR *pAccelerationStructure,
3835 VkResult result) {
3836 if (VK_SUCCESS != result) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003837 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE_KHR>(*pAccelerationStructure, pCreateInfo);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003838 as_state->allocator = pAllocator;
sourav parmarcd5fb182020-07-17 12:58:44 -07003839 accelerationStructureMap_khr[*pAccelerationStructure] = std::move(as_state);
Jeff Bolz95176d02020-04-01 00:36:16 -05003840}
3841
sourav parmarcd5fb182020-07-17 12:58:44 -07003842void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresKHR(
3843 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3844 const VkAccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos) {
3845 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3846 if (cb_state == nullptr) {
3847 return;
3848 }
3849 for (uint32_t i = 0; i < infoCount; ++i) {
3850 ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
3851 ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3852 if (dst_as_state != nullptr) {
3853 dst_as_state->built = true;
3854 dst_as_state->build_info_khr.initialize(&pInfos[i]);
3855 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3856 }
3857 if (src_as_state != nullptr) {
3858 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3859 }
3860 }
3861 cb_state->hasBuildAccelerationStructureCmd = true;
3862}
3863
3864void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresIndirectKHR(
3865 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3866 const VkDeviceAddress *pIndirectDeviceAddresses, const uint32_t *pIndirectStrides,
3867 const uint32_t *const *ppMaxPrimitiveCounts) {
3868 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3869 if (cb_state == nullptr) {
3870 return;
3871 }
3872 for (uint32_t i = 0; i < infoCount; ++i) {
3873 ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
3874 ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3875 if (dst_as_state != nullptr) {
3876 dst_as_state->built = true;
3877 dst_as_state->build_info_khr.initialize(&pInfos[i]);
3878 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3879 }
3880 if (src_as_state != nullptr) {
3881 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3882 }
3883 }
3884 cb_state->hasBuildAccelerationStructureCmd = true;
3885}
locke-lunargd556cc32019-09-17 01:21:23 -06003886void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
Mike Schuchardt2df08912020-12-15 16:28:09 -08003887 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2 *pMemoryRequirements) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003888 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(pInfo->accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003889 if (as_state != nullptr) {
3890 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3891 as_state->memory_requirements = *pMemoryRequirements;
3892 as_state->memory_requirements_checked = true;
3893 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3894 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3895 as_state->build_scratch_memory_requirements_checked = true;
3896 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3897 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3898 as_state->update_scratch_memory_requirements_checked = true;
3899 }
3900 }
3901}
3902
// Track memory bindings made by vkBindAccelerationStructureMemoryNV and, when GPU
// assisted validation is enabled, fetch the AS's opaque handle (needed when
// validating top-level acceleration structure builds).
void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];

        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(info.accelerationStructure);
        if (as_state) {
            // Track objects tied to memory
            SetMemBinding(info.memory, as_state, info.memoryOffset,
                          VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));

            // GPU validation of top level acceleration structure building needs acceleration structure handles.
            // XXX TODO: Query device address for KHR extension
            // The handle is 8 bytes (uint64_t), hence the hard-coded data size below.
            if (enabled[gpu_validation]) {
                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
            }
        }
    }
}
3923
3924void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3925 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3926 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3927 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3928 if (cb_state == nullptr) {
3929 return;
3930 }
3931
sourav parmarcd5fb182020-07-17 12:58:44 -07003932 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
3933 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
locke-lunargd556cc32019-09-17 01:21:23 -06003934 if (dst_as_state != nullptr) {
3935 dst_as_state->built = true;
3936 dst_as_state->build_info.initialize(pInfo);
3937 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3938 }
3939 if (src_as_state != nullptr) {
3940 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3941 }
3942 cb_state->hasBuildAccelerationStructureCmd = true;
3943}
3944
3945void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3946 VkAccelerationStructureNV dst,
3947 VkAccelerationStructureNV src,
3948 VkCopyAccelerationStructureModeNV mode) {
3949 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3950 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003951 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
3952 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003953 if (dst_as_state != nullptr && src_as_state != nullptr) {
3954 dst_as_state->built = true;
3955 dst_as_state->build_info = src_as_state->build_info;
3956 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3957 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3958 }
3959 }
3960}
3961
Jeff Bolz95176d02020-04-01 00:36:16 -05003962void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3963 VkAccelerationStructureKHR accelerationStructure,
3964 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003965 if (!accelerationStructure) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003966 auto *as_state = GetAccelerationStructureStateKHR(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003967 if (as_state) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003968 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06003969 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
locke-lunargd556cc32019-09-17 01:21:23 -06003970 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003971 as_state->destroyed = true;
sourav parmarcd5fb182020-07-17 12:58:44 -07003972 accelerationStructureMap_khr.erase(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003973 }
3974}
3975
Jeff Bolz95176d02020-04-01 00:36:16 -05003976void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3977 VkAccelerationStructureNV accelerationStructure,
3978 const VkAllocationCallbacks *pAllocator) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003979 if (!accelerationStructure) return;
3980 auto *as_state = GetAccelerationStructureStateNV(accelerationStructure);
3981 if (as_state) {
3982 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
3983 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
sourav parmarcd5fb182020-07-17 12:58:44 -07003984 ClearMemoryObjectBindings(obj_struct);
3985 as_state->destroyed = true;
3986 accelerationStructureMap.erase(accelerationStructure);
3987 }
Jeff Bolz95176d02020-04-01 00:36:16 -05003988}
3989
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003990void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3991 uint32_t viewportCount,
3992 const VkViewportWScalingNV *pViewportWScalings) {
3993 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3994 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003995 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003996}
3997
locke-lunargd556cc32019-09-17 01:21:23 -06003998void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3999 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4000 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004001 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004002}
4003
4004void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
4005 uint16_t lineStipplePattern) {
4006 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4007 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004008 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004009}
4010
4011void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
4012 float depthBiasClamp, float depthBiasSlopeFactor) {
4013 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4014 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004015 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004016}
4017
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004018void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
4019 const VkRect2D *pScissors) {
4020 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4021 cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
4022 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004023 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004024}
4025
locke-lunargd556cc32019-09-17 01:21:23 -06004026void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
4027 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4028 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004029 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004030}
4031
4032void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
4033 float maxDepthBounds) {
4034 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4035 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004036 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004037}
4038
4039void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
4040 uint32_t compareMask) {
4041 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4042 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004043 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004044}
4045
4046void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
4047 uint32_t writeMask) {
4048 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4049 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004050 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004051}
4052
4053void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
4054 uint32_t reference) {
4055 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4056 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004057 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004058}
4059
// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
// Sets outside [first_set, first_set + set_count) that become layout-incompatible with the
// new binding are "disturbed": they are unbound and any owned push descriptor set released.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    // Exactly one of the two descriptor-set sources must be provided.
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    const auto lv_bind_point = ConvertToLvlBindPoint(pipeline_bind_point);
    auto &last_bound = cb_state->lastBound[lv_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else.
    // Releases the command buffer's owned push descriptor set if ds is it; returns
    // true when a push descriptor set was released (at most one can exist per bind point).
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                // Consume this set's slice of the caller-supplied dynamic offsets.
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
4154
4155// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
4156void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
4157 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
4158 uint32_t firstSet, uint32_t setCount,
4159 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
4160 const uint32_t *pDynamicOffsets) {
4161 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4162 auto pipeline_layout = GetPipelineLayout(layout);
4163
4164 // Resize binding arrays
4165 uint32_t last_set_index = firstSet + setCount - 1;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06004166 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
4167 if (last_set_index >= cb_state->lastBound[lv_bind_point].per_set.size()) {
4168 cb_state->lastBound[lv_bind_point].per_set.resize(last_set_index + 1);
locke-lunargd556cc32019-09-17 01:21:23 -06004169 }
4170
4171 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
4172 dynamicOffsetCount, pDynamicOffsets);
locke-lunargb8d7a7a2020-10-25 16:01:52 -06004173 cb_state->lastBound[lv_bind_point].pipeline_layout = layout;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06004174 UpdateSamplerDescriptorsUsedByImage(cb_state->lastBound[lv_bind_point]);
locke-lunargd556cc32019-09-17 01:21:23 -06004175}
4176
// Shared state-tracking for vkCmdPushDescriptorSetKHR: (re)create the command
// buffer's push descriptor set if needed, bind it at 'set', and apply the writes.
void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates: the target set must exist in the layout and be a push descriptor layout.
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor()) {
        return;
    }

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto& dsl = pipeline_layout->set_layouts[set];
    const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
    auto &last_bound = cb_state->lastBound[lv_bind_point];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set, replace it with a fresh one.
    // Ownership of the raw 'new' DescriptorSet is transferred to last_bound (it is stored
    // in the push_descriptor_set smart pointer by UnbindAndResetPushDescriptorSet).
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    // Bind the (new or extant) push descriptor set at 'set'; push descriptors have no dynamic offsets.
    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}
4204
4205void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
4206 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
4207 uint32_t set, uint32_t descriptorWriteCount,
4208 const VkWriteDescriptorSet *pDescriptorWrites) {
4209 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4210 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
4211}
4212
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06004213void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
4214 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
4215 const void *pValues) {
4216 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4217 if (cb_state != nullptr) {
4218 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
4219
4220 auto &push_constant_data = cb_state->push_constant_data;
4221 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
4222 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06004223 cb_state->push_constant_pipeline_layout_set = layout;
4224
4225 auto flags = stageFlags;
4226 uint32_t bit_shift = 0;
4227 while (flags) {
4228 if (flags & 1) {
4229 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
4230 const auto it = cb_state->push_constant_data_update.find(flag);
4231
4232 if (it != cb_state->push_constant_data_update.end()) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06004233 std::memset(it->second.data() + offset, PC_Byte_Updated, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06004234 }
4235 }
4236 flags = flags >> 1;
4237 ++bit_shift;
4238 }
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06004239 }
4240}
4241
locke-lunargd556cc32019-09-17 01:21:23 -06004242void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
4243 VkIndexType indexType) {
locke-lunargd556cc32019-09-17 01:21:23 -06004244 auto cb_state = GetCBState(commandBuffer);
4245
4246 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
Piers Daniell39842ee2020-07-10 16:42:33 -06004247 cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
locke-lunarg1ae57d62020-11-18 10:49:19 -07004248 cb_state->index_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(buffer);
4249 cb_state->index_buffer_binding.size = cb_state->index_buffer_binding.buffer_state->createInfo.size;
locke-lunargd556cc32019-09-17 01:21:23 -06004250 cb_state->index_buffer_binding.offset = offset;
4251 cb_state->index_buffer_binding.index_type = indexType;
4252 // Add binding for this index buffer to this commandbuffer
locke-lunarg1ae57d62020-11-18 10:49:19 -07004253 AddCommandBufferBindingBuffer(cb_state, cb_state->index_buffer_binding.buffer_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06004254}
4255
4256void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
4257 uint32_t bindingCount, const VkBuffer *pBuffers,
4258 const VkDeviceSize *pOffsets) {
4259 auto cb_state = GetCBState(commandBuffer);
4260
4261 uint32_t end = firstBinding + bindingCount;
4262 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
4263 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
4264 }
4265
4266 for (uint32_t i = 0; i < bindingCount; ++i) {
4267 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07004268 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06004269 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06004270 vertex_buffer_binding.size = VK_WHOLE_SIZE;
4271 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06004272 // Add binding for this vertex buffer to this commandbuffer
Jeff Bolz165818a2020-05-08 11:19:03 -05004273 if (pBuffers[i]) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07004274 AddCommandBufferBindingBuffer(cb_state, vertex_buffer_binding.buffer_state.get());
Jeff Bolz165818a2020-05-08 11:19:03 -05004275 }
locke-lunargd556cc32019-09-17 01:21:23 -06004276 }
4277}
4278
4279void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
4280 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
4281 auto cb_state = GetCBState(commandBuffer);
4282 auto dst_buffer_state = GetBufferState(dstBuffer);
4283
4284 // Update bindings between buffer and cmd buffer
4285 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
4286}
4287
Jeremy Gebben74aa7622020-12-15 11:18:00 -07004288bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags2KHR stageMask,
Jeff Bolz310775c2019-10-09 00:46:33 -05004289 EventToStageMap *localEventToStageMap) {
4290 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06004291 return false;
4292}
4293
Jeremy Gebben74aa7622020-12-15 11:18:00 -07004294void ValidationStateTracker::RecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06004295 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4296 auto event_state = GetEventState(event);
4297 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05004298 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004299 }
4300 cb_state->events.push_back(event);
4301 if (!cb_state->waitedEvents.count(event)) {
4302 cb_state->writeEventsBeforeWait.push_back(event);
4303 }
Jeff Bolz310775c2019-10-09 00:46:33 -05004304 cb_state->eventUpdates.emplace_back(
4305 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
4306 return SetEventStageMask(event, stageMask, localEventToStageMap);
4307 });
locke-lunargd556cc32019-09-17 01:21:23 -06004308}
4309
// Pre-record hook for the legacy vkCmdSetEvent: forwards to the shared recorder
// (the VkPipelineStageFlags value widens implicitly to VkPipelineStageFlags2KHR).
void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    RecordCmdSetEvent(commandBuffer, event, stageMask);
}
4314
// Pre-record hook for vkCmdSetEvent2KHR (synchronization2): collapse the dependency
// info's per-barrier masks into global src/dst masks, then record the src mask.
void ValidationStateTracker::PreCallRecordCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
                                                          const VkDependencyInfoKHR *pDependencyInfo) {
    auto stage_masks = sync_utils::GetGlobalStageMasks(*pDependencyInfo);

    RecordCmdSetEvent(commandBuffer, event, stage_masks.src);
}
4321
4322void ValidationStateTracker::RecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
4323 VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06004324 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4325 auto event_state = GetEventState(event);
4326 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05004327 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004328 }
4329 cb_state->events.push_back(event);
4330 if (!cb_state->waitedEvents.count(event)) {
4331 cb_state->writeEventsBeforeWait.push_back(event);
4332 }
4333
4334 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05004335 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07004336 return SetEventStageMask(event, VkPipelineStageFlags2KHR(0), localEventToStageMap);
Jeff Bolz310775c2019-10-09 00:46:33 -05004337 });
locke-lunargd556cc32019-09-17 01:21:23 -06004338}
4339
// Pre-record hook for the legacy vkCmdResetEvent: forwards to the shared recorder
// (legacy stage mask widens implicitly to the synchronization2 flags type).
void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    RecordCmdResetEvent(commandBuffer, event, stageMask);
}
4344
// Pre-record hook for vkCmdResetEvent2KHR: identical state tracking to the legacy path.
void ValidationStateTracker::PreCallRecordCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
                                                            VkPipelineStageFlags2KHR stageMask) {
    RecordCmdResetEvent(commandBuffer, event, stageMask);
}
4349
4350void ValidationStateTracker::RecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents) {
locke-lunargd556cc32019-09-17 01:21:23 -06004351 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4352 for (uint32_t i = 0; i < eventCount; ++i) {
4353 auto event_state = GetEventState(pEvents[i]);
4354 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05004355 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
4356 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004357 }
4358 cb_state->waitedEvents.insert(pEvents[i]);
4359 cb_state->events.push_back(pEvents[i]);
4360 }
4361}
4362
// Pre-record hook for the legacy vkCmdWaitEvents: only the event handles matter for
// state tracking; the stage masks and barriers are handled by validation elsewhere.
void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                        uint32_t bufferMemoryBarrierCount,
                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                        uint32_t imageMemoryBarrierCount,
                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
}
4372
// Pre-record hook for vkCmdWaitEvents2KHR: same event-only tracking as the legacy path;
// the per-event dependency infos are not needed here.
void ValidationStateTracker::PreCallRecordCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount,
                                                            const VkEvent *pEvents, const VkDependencyInfoKHR *pDependencyInfos) {
    RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
}
4377
Jeff Bolz310775c2019-10-09 00:46:33 -05004378bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
4379 (*localQueryToStateMap)[object] = value;
4380 return false;
4381}
4382
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004383bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
4384 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05004385 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004386 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05004387 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06004388 }
4389 return false;
4390}
4391
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004392QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
4393 uint32_t perfPass) const {
4394 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06004395
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004396 auto iter = localQueryToStateMap->find(query);
4397 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05004398
Jeff Bolz310775c2019-10-09 00:46:33 -05004399 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06004400}
4401
4402void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004403 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004404 cb_state->activeQueries.insert(query_obj);
4405 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004406 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
4407 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4408 QueryMap *localQueryToStateMap) {
4409 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
4410 return false;
4411 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004412 auto pool_state = GetQueryPoolState(query_obj.pool);
4413 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
4414 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004415}
4416
4417void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
4418 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004419 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004420 QueryObject query = {queryPool, slot};
4421 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4422 RecordCmdBeginQuery(cb_state, query);
4423}
4424
4425void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004426 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004427 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004428 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
4429 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4430 QueryMap *localQueryToStateMap) {
4431 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
4432 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004433 auto pool_state = GetQueryPoolState(query_obj.pool);
4434 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
4435 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004436}
4437
4438void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004439 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004440 QueryObject query_obj = {queryPool, slot};
4441 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4442 RecordCmdEndQuery(cb_state, query_obj);
4443}
4444
4445void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4446 uint32_t firstQuery, uint32_t queryCount) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004447 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004448 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4449
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02004450 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
4451 QueryObject query = {queryPool, slot};
4452 cb_state->resetQueries.insert(query);
4453 }
4454
Jeff Bolz310775c2019-10-09 00:46:33 -05004455 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004456 bool do_validate, VkQueryPool &firstPerfQueryPool,
4457 uint32_t perfQueryPass,
4458 QueryMap *localQueryToStateMap) {
4459 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06004460 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004461 auto pool_state = GetQueryPoolState(queryPool);
4462 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004463 cb_state);
4464}
4465
4466void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4467 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
4468 VkDeviceSize dstOffset, VkDeviceSize stride,
4469 VkQueryResultFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004470 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004471 auto cb_state = GetCBState(commandBuffer);
4472 auto dst_buff_state = GetBufferState(dstBuffer);
4473 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05004474 auto pool_state = GetQueryPoolState(queryPool);
4475 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004476 cb_state);
4477}
4478
// Post-record hook for the legacy vkCmdWriteTimestamp: identical state tracking to the
// synchronization2 variant (the stage flag bit widens implicitly).
void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    PostCallRecordCmdWriteTimestamp2KHR(commandBuffer, pipelineStage, queryPool, slot);
}
4483
4484void ValidationStateTracker::PostCallRecordCmdWriteTimestamp2KHR(VkCommandBuffer commandBuffer,
4485 VkPipelineStageFlags2KHR pipelineStage, VkQueryPool queryPool,
4486 uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004487 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004488 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeff Bolzadbfa852019-10-04 13:53:30 -05004489 auto pool_state = GetQueryPoolState(queryPool);
4490 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004491 cb_state);
4492 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004493 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
4494 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4495 QueryMap *localQueryToStateMap) {
4496 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
4497 });
locke-lunargd556cc32019-09-17 01:21:23 -06004498}
4499
Marijn Suijten6750fdc2020-12-30 22:06:42 +01004500void ValidationStateTracker::PostCallRecordCmdWriteAccelerationStructuresPropertiesKHR(
4501 VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR *pAccelerationStructures,
4502 VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {
4503 if (disabled[query_validation]) return;
4504 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4505 auto pool_state = GetQueryPoolState(queryPool);
4506 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
4507 cb_state);
4508 cb_state->queryUpdates.emplace_back(
4509 [queryPool, firstQuery, accelerationStructureCount](const ValidationStateTracker *device_data, bool do_validate,
4510 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4511 QueryMap *localQueryToStateMap) {
4512 return SetQueryStateMulti(queryPool, firstQuery, accelerationStructureCount, perfQueryPass, QUERYSTATE_ENDED,
4513 localQueryToStateMap);
4514 });
4515}
4516
locke-lunargd556cc32019-09-17 01:21:23 -06004517void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
4518 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
4519 VkResult result) {
4520 if (VK_SUCCESS != result) return;
4521 // Shadow create info and store in map
Jeff Bolz6ae39612019-10-11 20:57:36 -05004522 auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));
locke-lunargd556cc32019-09-17 01:21:23 -06004523
Mike Schuchardt2df08912020-12-15 16:28:09 -08004524 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) == 0) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07004525 fb_state->attachments_view_state.resize(pCreateInfo->attachmentCount);
4526
locke-lunargd556cc32019-09-17 01:21:23 -06004527 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07004528 fb_state->attachments_view_state[i] = GetShared<IMAGE_VIEW_STATE>(pCreateInfo->pAttachments[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06004529 }
4530 }
4531 frameBufferMap[*pFramebuffer] = std::move(fb_state);
4532}
4533
// Build the render pass's dependency graph from the VkRenderPassCreateInfo2
// dependency list:
//  - subpassToNode: prev/next adjacency between subpasses (self and EXTERNAL
//    dependencies excluded)
//  - self_dependencies: per-subpass list of dependency indices with src == dst
//  - subpass_dependencies: per-subpass barriers from/to EXTERNAL plus prev/next
//    barrier maps keyed by the neighboring subpass node
// Finally, each subpass's "async" list records every earlier subpass it has NO
// dependency path from (directly or transitively).
// NOTE(review): rp_version is currently unused here — the create info is already the
// unified VkRenderPassCreateInfo2 form.
void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2 *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);
    auto &subpass_dependencies = render_pass->subpass_dependencies;
    subpass_dependencies.resize(pCreateInfo->subpassCount);

    // Initialize each node with its own subpass index.
    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
        subpass_dependencies[i].pass = i;
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const auto &dependency = pCreateInfo->pDependencies[i];
        const auto src_subpass = dependency.srcSubpass;
        const auto dst_subpass = dependency.dstSubpass;
        // Adjacency (prev/next) only applies between two real subpasses.
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
        // Classify the dependency as an external barrier or an inter-subpass barrier;
        // the stored pointers reference pCreateInfo, which RENDER_PASS_STATE shadows.
        if (src_subpass == VK_SUBPASS_EXTERNAL) {
            assert(dst_subpass != VK_SUBPASS_EXTERNAL);  // this is invalid per VUID-VkSubpassDependency-srcSubpass-00865
            subpass_dependencies[dst_subpass].barrier_from_external.emplace_back(&dependency);
        } else if (dst_subpass == VK_SUBPASS_EXTERNAL) {
            subpass_dependencies[src_subpass].barrier_to_external.emplace_back(&dependency);
        } else if (dependency.srcSubpass != dependency.dstSubpass) {
            // ignore self dependencies in prev and next
            subpass_dependencies[src_subpass].next[&subpass_dependencies[dst_subpass]].emplace_back(&dependency);
            subpass_dependencies[dst_subpass].prev[&subpass_dependencies[src_subpass]].emplace_back(&dependency);
        }
    }

    //
    // Determine "asynchronous" subpasses
    // Synchronization is only interested in asynchronous subpasses *earlier* than the current one, so we only look towards those.
    // NOTE: This is O(N^3), which we could shrink to O(N^2logN) using sets instead of arrays, but given that N is likely to be
    // small and the K for |= from the prev is much less than for set, we'll accept the brute force.
    std::vector<std::vector<bool>> pass_depends(pCreateInfo->subpassCount);
    for (uint32_t i = 1; i < pCreateInfo->subpassCount; ++i) {
        auto &depends = pass_depends[i];
        depends.resize(i);
        auto &subpass_dep = subpass_dependencies[i];
        // Transitive closure over predecessors: depends[j] == true iff subpass j is
        // reachable (backwards) from subpass i through recorded dependencies.
        for (const auto &prev : subpass_dep.prev) {
            const auto prev_pass = prev.first->pass;
            const auto &prev_depends = pass_depends[prev_pass];
            for (uint32_t j = 0; j < prev_pass; j++) {
                depends[j] = depends[j] | prev_depends[j];
            }
            depends[prev_pass] = true;
        }
        // Everything earlier and unreachable can run concurrently with this subpass.
        for (uint32_t pass = 0; pass < subpass_dep.pass; pass++) {
            if (!depends[pass]) {
                subpass_dep.async.push_back(pass);
            }
        }
    }
}
4597
John Zulauf4aff5d92020-02-21 08:29:35 -07004598static VkSubpassDependency2 ImplicitDependencyFromExternal(uint32_t subpass) {
4599 VkSubpassDependency2 from_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4600 nullptr,
4601 VK_SUBPASS_EXTERNAL,
4602 subpass,
4603 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
4604 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4605 0,
4606 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4607 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4608 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4609 0,
4610 0};
4611 return from_external;
4612}
4613
4614static VkSubpassDependency2 ImplicitDependencyToExternal(uint32_t subpass) {
4615 VkSubpassDependency2 to_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4616 nullptr,
4617 subpass,
4618 VK_SUBPASS_EXTERNAL,
4619 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4620 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
4621 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4622 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4623 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4624 0,
4625 0,
4626 0};
4627 return to_external;
4628}
4629
locke-lunargd556cc32019-09-17 01:21:23 -06004630void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
4631 std::shared_ptr<RENDER_PASS_STATE> &render_pass,
4632 VkRenderPass *pRenderPass) {
4633 render_pass->renderPass = *pRenderPass;
4634 auto create_info = render_pass->createInfo.ptr();
4635
4636 RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());
4637
John Zulauf8863c332020-03-20 10:34:33 -06004638 struct AttachmentTracker { // This is really only of local interest, but a bit big for a lambda
4639 RENDER_PASS_STATE *const rp;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004640 std::vector<uint32_t> &first;
John Zulauf1507ee42020-05-18 11:33:09 -06004641 std::vector<bool> &first_is_transition;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004642 std::vector<uint32_t> &last;
John Zulauf8863c332020-03-20 10:34:33 -06004643 std::vector<std::vector<RENDER_PASS_STATE::AttachmentTransition>> &subpass_transitions;
Jeremy Gebbencbf22862021-03-03 12:01:22 -07004644 layer_data::unordered_map<uint32_t, bool> &first_read;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004645 const uint32_t attachment_count;
John Zulauf8863c332020-03-20 10:34:33 -06004646 std::vector<VkImageLayout> attachment_layout;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004647 std::vector<std::vector<VkImageLayout>> subpass_attachment_layout;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004648 explicit AttachmentTracker(std::shared_ptr<RENDER_PASS_STATE> &render_pass)
John Zulauf8863c332020-03-20 10:34:33 -06004649 : rp(render_pass.get()),
4650 first(rp->attachment_first_subpass),
John Zulauf1507ee42020-05-18 11:33:09 -06004651 first_is_transition(rp->attachment_first_is_transition),
John Zulauf8863c332020-03-20 10:34:33 -06004652 last(rp->attachment_last_subpass),
4653 subpass_transitions(rp->subpass_transitions),
4654 first_read(rp->attachment_first_read),
4655 attachment_count(rp->createInfo.attachmentCount),
John Zulauf2bc1fde2020-04-24 15:09:51 -06004656 attachment_layout(),
4657 subpass_attachment_layout() {
John Zulaufbb9f07f2020-03-19 16:53:06 -06004658 first.resize(attachment_count, VK_SUBPASS_EXTERNAL);
John Zulauf1507ee42020-05-18 11:33:09 -06004659 first_is_transition.resize(attachment_count, false);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004660 last.resize(attachment_count, VK_SUBPASS_EXTERNAL);
John Zulauf8863c332020-03-20 10:34:33 -06004661 subpass_transitions.resize(rp->createInfo.subpassCount + 1); // Add an extra for EndRenderPass
4662 attachment_layout.reserve(attachment_count);
John Zulauf2bc1fde2020-04-24 15:09:51 -06004663 subpass_attachment_layout.resize(rp->createInfo.subpassCount);
4664 for (auto &subpass_layouts : subpass_attachment_layout) {
4665 subpass_layouts.resize(attachment_count, kInvalidLayout);
4666 }
4667
John Zulauf8863c332020-03-20 10:34:33 -06004668 for (uint32_t j = 0; j < attachment_count; j++) {
4669 attachment_layout.push_back(rp->createInfo.pAttachments[j].initialLayout);
4670 }
John Zulauf4aff5d92020-02-21 08:29:35 -07004671 }
John Zulauf4aff5d92020-02-21 08:29:35 -07004672
John Zulaufbb9f07f2020-03-19 16:53:06 -06004673 void Update(uint32_t subpass, const VkAttachmentReference2 *attach_ref, uint32_t count, bool is_read) {
4674 if (nullptr == attach_ref) return;
4675 for (uint32_t j = 0; j < count; ++j) {
4676 const auto attachment = attach_ref[j].attachment;
4677 if (attachment != VK_ATTACHMENT_UNUSED) {
John Zulauf8863c332020-03-20 10:34:33 -06004678 const auto layout = attach_ref[j].layout;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004679 // Take advantage of the fact that insert won't overwrite, so we'll only write the first time.
Jeremy Gebbencbf22862021-03-03 12:01:22 -07004680 first_read.emplace(attachment, is_read);
John Zulauf2bc1fde2020-04-24 15:09:51 -06004681 if (first[attachment] == VK_SUBPASS_EXTERNAL) {
4682 first[attachment] = subpass;
4683 const auto initial_layout = rp->createInfo.pAttachments[attachment].initialLayout;
John Zulauf1507ee42020-05-18 11:33:09 -06004684 if (initial_layout != layout) {
4685 subpass_transitions[subpass].emplace_back(VK_SUBPASS_EXTERNAL, attachment, initial_layout, layout);
4686 first_is_transition[attachment] = true;
4687 }
John Zulauf2bc1fde2020-04-24 15:09:51 -06004688 }
John Zulaufbb9f07f2020-03-19 16:53:06 -06004689 last[attachment] = subpass;
John Zulauf8863c332020-03-20 10:34:33 -06004690
John Zulauf2bc1fde2020-04-24 15:09:51 -06004691 for (const auto &prev : rp->subpass_dependencies[subpass].prev) {
John Zulaufbaea94f2020-09-15 17:55:16 -06004692 const auto prev_pass = prev.first->pass;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004693 const auto prev_layout = subpass_attachment_layout[prev_pass][attachment];
4694 if ((prev_layout != kInvalidLayout) && (prev_layout != layout)) {
4695 subpass_transitions[subpass].emplace_back(prev_pass, attachment, prev_layout, layout);
4696 }
John Zulauf8863c332020-03-20 10:34:33 -06004697 }
John Zulauf2bc1fde2020-04-24 15:09:51 -06004698 attachment_layout[attachment] = layout;
John Zulauf8863c332020-03-20 10:34:33 -06004699 }
4700 }
4701 }
4702 void FinalTransitions() {
4703 auto &final_transitions = subpass_transitions[rp->createInfo.subpassCount];
4704
4705 for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
4706 const auto final_layout = rp->createInfo.pAttachments[attachment].finalLayout;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004707 // Add final transitions for attachments that were used and change layout.
4708 if ((last[attachment] != VK_SUBPASS_EXTERNAL) && final_layout != attachment_layout[attachment]) {
4709 final_transitions.emplace_back(last[attachment], attachment, attachment_layout[attachment], final_layout);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004710 }
locke-lunargd556cc32019-09-17 01:21:23 -06004711 }
4712 }
John Zulaufbb9f07f2020-03-19 16:53:06 -06004713 };
John Zulauf8863c332020-03-20 10:34:33 -06004714 AttachmentTracker attachment_tracker(render_pass);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004715
4716 for (uint32_t subpass_index = 0; subpass_index < create_info->subpassCount; ++subpass_index) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004717 const VkSubpassDescription2 &subpass = create_info->pSubpasses[subpass_index];
John Zulauf8863c332020-03-20 10:34:33 -06004718 attachment_tracker.Update(subpass_index, subpass.pColorAttachments, subpass.colorAttachmentCount, false);
4719 attachment_tracker.Update(subpass_index, subpass.pResolveAttachments, subpass.colorAttachmentCount, false);
4720 attachment_tracker.Update(subpass_index, subpass.pDepthStencilAttachment, 1, false);
4721 attachment_tracker.Update(subpass_index, subpass.pInputAttachments, subpass.inputAttachmentCount, true);
John Zulauf4aff5d92020-02-21 08:29:35 -07004722 }
John Zulauf8863c332020-03-20 10:34:33 -06004723 attachment_tracker.FinalTransitions();
John Zulauf4aff5d92020-02-21 08:29:35 -07004724
John Zulaufbb9f07f2020-03-19 16:53:06 -06004725 // Add implicit dependencies
John Zulauf8863c332020-03-20 10:34:33 -06004726 for (uint32_t attachment = 0; attachment < attachment_tracker.attachment_count; attachment++) {
4727 const auto first_use = attachment_tracker.first[attachment];
John Zulauf4aff5d92020-02-21 08:29:35 -07004728 if (first_use != VK_SUBPASS_EXTERNAL) {
4729 auto &subpass_dep = render_pass->subpass_dependencies[first_use];
John Zulaufbaea94f2020-09-15 17:55:16 -06004730 if (subpass_dep.barrier_from_external.size() == 0) {
4731 // Add implicit from barrier if they're aren't any
John Zulauf4aff5d92020-02-21 08:29:35 -07004732 subpass_dep.implicit_barrier_from_external.reset(
4733 new VkSubpassDependency2(ImplicitDependencyFromExternal(first_use)));
John Zulaufbaea94f2020-09-15 17:55:16 -06004734 subpass_dep.barrier_from_external.emplace_back(subpass_dep.implicit_barrier_from_external.get());
John Zulauf4aff5d92020-02-21 08:29:35 -07004735 }
4736 }
4737
John Zulauf8863c332020-03-20 10:34:33 -06004738 const auto last_use = attachment_tracker.last[attachment];
John Zulauf4aff5d92020-02-21 08:29:35 -07004739 if (last_use != VK_SUBPASS_EXTERNAL) {
4740 auto &subpass_dep = render_pass->subpass_dependencies[last_use];
John Zulaufbaea94f2020-09-15 17:55:16 -06004741 if (render_pass->subpass_dependencies[last_use].barrier_to_external.size() == 0) {
4742 // Add implicit to barrier if they're aren't any
John Zulauf4aff5d92020-02-21 08:29:35 -07004743 subpass_dep.implicit_barrier_to_external.reset(new VkSubpassDependency2(ImplicitDependencyToExternal(last_use)));
John Zulaufbaea94f2020-09-15 17:55:16 -06004744 subpass_dep.barrier_to_external.emplace_back(subpass_dep.implicit_barrier_to_external.get());
John Zulauf4aff5d92020-02-21 08:29:35 -07004745 }
locke-lunargd556cc32019-09-17 01:21:23 -06004746 }
4747 }
4748
4749 // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
4750 renderPassMap[*pRenderPass] = std::move(render_pass);
4751}
4752
// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in the Google style guide, but intentionally forces the caller
// to move or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr
// copy construction or assignment.
4757void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
4758 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4759 VkResult result) {
4760 if (VK_SUCCESS != result) return;
4761 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4762 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
4763}
4764
Mike Schuchardt2df08912020-12-15 16:28:09 -08004765void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07004766 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4767 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004768 if (VK_SUCCESS != result) return;
4769 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4770 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
4771}
4772
Mike Schuchardt2df08912020-12-15 16:28:09 -08004773void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07004774 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4775 VkResult result) {
4776 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
4777}
4778
Mike Schuchardt2df08912020-12-15 16:28:09 -08004779void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07004780 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4781 VkResult result) {
4782 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
4783}
4784
// Shared record path for vkCmdBeginRenderPass{,2,2KHR}: snapshots the begin info on the command
// buffer, binds the render pass/framebuffer state objects to it, and rebuilds the per-attachment
// tracking vectors for subpass 0.
void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // Shared ownership: the CB keeps the RP/FB state alive for the duration of the pass instance.
    auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        // Deep-copy the begin info (including its pNext chain) so it outlives the caller's struct.
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;

        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(
            render_pass_state->cb_bindings,
            VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state.get()), cb_state);

        // Device-group rendering: use the chained device mask if present, else inherit the CB's initial mask.
        auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }

        // Reset attachment tracking; only repopulated when a framebuffer is known.
        cb_state->active_subpasses = nullptr;
        cb_state->active_attachments = nullptr;

        if (framebuffer) {
            cb_state->framebuffers.insert(framebuffer);

            // Set cb_state->active_subpasses
            cb_state->active_subpasses =
                std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
            const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
            UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);

            // Set cb_state->active_attachments & cb_state->attachments_view_states
            cb_state->active_attachments =
                std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(framebuffer->createInfo.attachmentCount);
            UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, pRenderPassBegin);

            // Connect this framebuffer and its children to this cmdBuffer
            AddFramebufferBinding(cb_state, framebuffer.get());
        }
    }
}
4833
4834void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
4835 const VkRenderPassBeginInfo *pRenderPassBegin,
4836 VkSubpassContents contents) {
4837 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
4838}
4839
4840void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
4841 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004842 const VkSubpassBeginInfo *pSubpassBeginInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06004843 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4844}
4845
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06004846void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4847 uint32_t counterBufferCount,
4848 const VkBuffer *pCounterBuffers,
4849 const VkDeviceSize *pCounterBufferOffsets) {
4850 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4851
4852 cb_state->transform_feedback_active = true;
4853}
4854
4855void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4856 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
4857 const VkDeviceSize *pCounterBufferOffsets) {
4858 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4859
4860 cb_state->transform_feedback_active = false;
4861}
4862
Tony-LunarG977448c2019-12-02 14:52:02 -07004863void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
4864 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004865 const VkSubpassBeginInfo *pSubpassBeginInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004866 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4867}
4868
locke-lunargd556cc32019-09-17 01:21:23 -06004869void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4870 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4871 cb_state->activeSubpass++;
4872 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07004873
4874 // Update cb_state->active_subpasses
4875 if (cb_state->activeRenderPass && cb_state->activeFramebuffer) {
4876 cb_state->active_subpasses = nullptr;
4877 cb_state->active_subpasses =
4878 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
4879
4880 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
4881 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
4882 }
locke-lunargd556cc32019-09-17 01:21:23 -06004883}
4884
4885void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4886 RecordCmdNextSubpass(commandBuffer, contents);
4887}
4888
4889void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004890 const VkSubpassBeginInfo *pSubpassBeginInfo,
4891 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06004892 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4893}
4894
Tony-LunarG977448c2019-12-02 14:52:02 -07004895void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004896 const VkSubpassBeginInfo *pSubpassBeginInfo,
4897 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004898 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4899}
4900
locke-lunargd556cc32019-09-17 01:21:23 -06004901void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
4902 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4903 cb_state->activeRenderPass = nullptr;
locke-lunargfc78e932020-11-19 17:06:24 -07004904 cb_state->active_attachments = nullptr;
4905 cb_state->active_subpasses = nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004906 cb_state->activeSubpass = 0;
4907 cb_state->activeFramebuffer = VK_NULL_HANDLE;
4908}
4909
4910void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
4911 RecordCmdEndRenderPassState(commandBuffer);
4912}
4913
4914void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004915 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06004916 RecordCmdEndRenderPassState(commandBuffer);
4917}
4918
Tony-LunarG977448c2019-12-02 14:52:02 -07004919void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004920 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004921 RecordCmdEndRenderPassState(commandBuffer);
4922}
locke-lunargd556cc32019-09-17 01:21:23 -06004923void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
4924 const VkCommandBuffer *pCommandBuffers) {
4925 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4926
4927 CMD_BUFFER_STATE *sub_cb_state = NULL;
4928 for (uint32_t i = 0; i < commandBuffersCount; i++) {
4929 sub_cb_state = GetCBState(pCommandBuffers[i]);
4930 assert(sub_cb_state);
4931 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
4932 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
4933 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
4934 // from the validation step to the recording step
4935 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
4936 }
4937 }
4938
4939 // Propagate inital layout and current layout state to the primary cmd buffer
4940 // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
4941 // ValidationStateTracker these maps will be empty, so leaving the propagation in the the state tracker should be a no-op
4942 // for those other classes.
4943 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
4944 const auto image = sub_layout_map_entry.first;
4945 const auto *image_state = GetImageState(image);
4946 if (!image_state) continue; // Can't set layouts of a dead image
4947
4948 auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
John Zulauf17708d02021-02-22 11:20:58 -07004949 const auto *sub_cb_subres_map = &sub_layout_map_entry.second;
locke-lunargd556cc32019-09-17 01:21:23 -06004950 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
4951 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
4952 }
4953
4954 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
4955 cb_state->linkedCommandBuffers.insert(sub_cb_state);
4956 sub_cb_state->linkedCommandBuffers.insert(cb_state);
4957 for (auto &function : sub_cb_state->queryUpdates) {
4958 cb_state->queryUpdates.push_back(function);
4959 }
4960 for (auto &function : sub_cb_state->queue_submit_functions) {
4961 cb_state->queue_submit_functions.push_back(function);
4962 }
4963 }
4964}
4965
4966void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
4967 VkFlags flags, void **ppData, VkResult result) {
4968 if (VK_SUCCESS != result) return;
4969 RecordMappedMemory(mem, offset, size, ppData);
4970}
4971
4972void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4973 auto mem_info = GetDevMemState(mem);
4974 if (mem_info) {
4975 mem_info->mapped_range = MemRange();
4976 mem_info->p_driver_data = nullptr;
4977 }
4978}
4979
// Records the state effects of binding memory to an image (vkBindImageMemory{,2,2KHR}).
// Handles two cases: binding to a swapchain image index (via VkBindImageMemorySwapchainInfoKHR in
// the pNext chain) and binding to an ordinary VkDeviceMemory allocation.
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // An Android special image cannot get VkSubresourceLayout until the image binds a memory.
        // See: VUID-vkGetImageSubresourceLayout-image-01895
        image_state->fragment_encoder =
            std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
        const auto swapchain_info = LvlFindInChain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto *swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                SWAPCHAIN_IMAGE &swap_image = swapchain->images[swapchain_info->imageIndex];
                if (swap_image.bound_images.empty()) {
                    // If this is the first "binding" of an image to this swapchain index, get a fake allocation
                    image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
                } else {
                    // Subsequent bindings share the fake address of the first bound image.
                    image_state->swapchain_fake_address = (*swap_image.bound_images.cbegin())->swapchain_fake_address;
                }
                swap_image.bound_images.emplace(image_state);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;

                // All images bound to this swapchain and index are aliases
                AddAliasingImage(image_state, &swap_image.bound_images);
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(image_state, mem_info, bindInfo.memoryOffset);
                // Images created with the ALIAS flag may overlap other bindings of the same memory.
                if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
                    AddAliasingImage(image_state, &mem_info->bound_images);
                }
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
    }
}
5021
5022void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
5023 VkDeviceSize memoryOffset, VkResult result) {
5024 if (VK_SUCCESS != result) return;
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06005025 auto bind_info = LvlInitStruct<VkBindImageMemoryInfo>();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005026 bind_info.image = image;
5027 bind_info.memory = mem;
5028 bind_info.memoryOffset = memoryOffset;
5029 UpdateBindImageMemoryState(bind_info);
locke-lunargd556cc32019-09-17 01:21:23 -06005030}
5031
5032void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005033 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005034 if (VK_SUCCESS != result) return;
5035 for (uint32_t i = 0; i < bindInfoCount; i++) {
5036 UpdateBindImageMemoryState(pBindInfos[i]);
5037 }
5038}
5039
5040void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005041 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005042 if (VK_SUCCESS != result) return;
5043 for (uint32_t i = 0; i < bindInfoCount; i++) {
5044 UpdateBindImageMemoryState(pBindInfos[i]);
5045 }
5046}
5047
5048void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
5049 auto event_state = GetEventState(event);
5050 if (event_state) {
5051 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
5052 }
locke-lunargd556cc32019-09-17 01:21:23 -06005053}
5054
5055void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
5056 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
5057 VkResult result) {
5058 if (VK_SUCCESS != result) return;
5059 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
5060 pImportSemaphoreFdInfo->flags);
5061}
5062
5063void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005064 VkExternalSemaphoreHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06005065 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
Mike Schuchardt2df08912020-12-15 16:28:09 -08005066 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06005067 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
5068 semaphore_state->scope = kSyncScopeExternalPermanent;
5069 }
5070}
5071
5072#ifdef VK_USE_PLATFORM_WIN32_KHR
5073void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
5074 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
5075 if (VK_SUCCESS != result) return;
5076 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
5077 pImportSemaphoreWin32HandleInfo->flags);
5078}
5079
5080void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
5081 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
5082 HANDLE *pHandle, VkResult result) {
5083 if (VK_SUCCESS != result) return;
5084 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
5085}
5086
5087void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
5088 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
5089 if (VK_SUCCESS != result) return;
5090 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
5091 pImportFenceWin32HandleInfo->flags);
5092}
5093
5094void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
5095 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
5096 HANDLE *pHandle, VkResult result) {
5097 if (VK_SUCCESS != result) return;
5098 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
5099}
5100#endif
5101
5102void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
5103 VkResult result) {
5104 if (VK_SUCCESS != result) return;
5105 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
5106}
5107
Mike Schuchardt2df08912020-12-15 16:28:09 -08005108void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type,
5109 VkFenceImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06005110 FENCE_STATE *fence_node = GetFenceState(fence);
5111 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08005112 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_FENCE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06005113 fence_node->scope == kSyncScopeInternal) {
5114 fence_node->scope = kSyncScopeExternalTemporary;
5115 } else {
5116 fence_node->scope = kSyncScopeExternalPermanent;
5117 }
5118 }
5119}
5120
5121void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
5122 VkResult result) {
5123 if (VK_SUCCESS != result) return;
5124 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
5125}
5126
Mike Schuchardt2df08912020-12-15 16:28:09 -08005127void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06005128 FENCE_STATE *fence_state = GetFenceState(fence);
5129 if (fence_state) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08005130 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06005131 // Export with reference transference becomes external
5132 fence_state->scope = kSyncScopeExternalPermanent;
5133 } else if (fence_state->scope == kSyncScopeInternal) {
5134 // Export with copy transference has a side effect of resetting the fence
5135 fence_state->state = FENCE_UNSIGNALED;
5136 }
5137 }
5138}
5139
5140void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
5141 VkResult result) {
5142 if (VK_SUCCESS != result) return;
5143 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
5144}
5145
5146void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
5147 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
5148 if (VK_SUCCESS != result) return;
John Zulaufd5115702021-01-18 12:34:33 -07005149 const auto event = *pEvent;
Jeremy Gebbencbf22862021-03-03 12:01:22 -07005150 eventMap.emplace(event, std::make_shared<EVENT_STATE>(event, pCreateInfo->flags));
locke-lunargd556cc32019-09-17 01:21:23 -06005151}
5152
5153void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
5154 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
5155 SWAPCHAIN_NODE *old_swapchain_state) {
5156 if (VK_SUCCESS == result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005157 auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06005158 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
5159 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
5160 swapchain_state->shared_presentable = true;
5161 }
5162 surface_state->swapchain = swapchain_state.get();
5163 swapchainMap[*pSwapchain] = std::move(swapchain_state);
5164 } else {
5165 surface_state->swapchain = nullptr;
5166 }
5167 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
5168 if (old_swapchain_state) {
5169 old_swapchain_state->retired = true;
5170 }
5171 return;
5172}
5173
5174void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
5175 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
5176 VkResult result) {
5177 auto surface_state = GetSurfaceState(pCreateInfo->surface);
5178 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
5179 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
5180}
5181
// vkDestroySwapchainKHR — tear down tracking for the swapchain and every image bound to it:
// removes aliasing links, clears memory bindings, erases the per-image state, unlinks the
// owning surface, and finally removes the swapchain entry itself.
void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (auto &swapchain_image : swapchain_data->images) {
            // TODO: missing validation that the bound images are empty (except for image_state above)
            // Clean up the aliases and the bound_images *before* erasing the image_state.
            RemoveAliasingImages(swapchain_image.bound_images);
            swapchain_image.bound_images.clear();

            if (swapchain_image.image_state) {
                ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image_state->image, kVulkanObjectTypeImage));
                imageMap.erase(swapchain_image.image_state->image);
                // Null the pointer after erase so no dangling reference survives in the swapchain entry.
                swapchain_image.image_state = nullptr;
            }
        }

        // Detach the surface from this swapchain if it still points at it.
        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        // Mark destroyed before erasing: other shared_ptr holders can still observe the flag.
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}
5208
sfricke-samsung5c1b7392020-12-13 22:17:15 -08005209void ValidationStateTracker::PostCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
5210 const VkDisplayModeCreateInfoKHR *pCreateInfo,
5211 const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode,
5212 VkResult result) {
5213 if (VK_SUCCESS != result) return;
5214 if (!pMode) return;
5215 auto display_mode_state = std::make_shared<DISPLAY_MODE_STATE>(*pMode);
5216 display_mode_state->physical_device = physicalDevice;
5217 display_mode_map[*pMode] = std::move(display_mode_state);
5218}
5219
// vkQueuePresentKHR — retire the wait semaphores and mark each successfully presented
// image as released back to the WSI.
void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto semaphore_state = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (semaphore_state) {
            // The wait consumed the signal; the semaphore no longer has a pending signaler.
            semaphore_state->signaler.first = VK_NULL_HANDLE;
            semaphore_state->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
        // confused itself just as much.
        // Per-swapchain results (pResults) take precedence over the aggregate result when provided.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            IMAGE_STATE *image_state = swapchain_data->images[pPresentInfo->pImageIndices[i]].image_state;
            if (image_state) {
                image_state->acquired = false;
                // Shared-presentable images keep their layout fixed after the first present.
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
    // its semaphore waits) /never/ participate in any completion proof.
}
5250
5251void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
5252 const VkSwapchainCreateInfoKHR *pCreateInfos,
5253 const VkAllocationCallbacks *pAllocator,
5254 VkSwapchainKHR *pSwapchains, VkResult result) {
5255 if (pCreateInfos) {
5256 for (uint32_t i = 0; i < swapchainCount; i++) {
5257 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
5258 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
5259 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
5260 }
5261 }
5262}
5263
// Shared record path for vkAcquireNextImage{KHR,2KHR}: marks the signaled fence/semaphore
// and flags the acquired swapchain image.
void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto fence_state = GetFenceState(fence);
    if (fence_state && fence_state->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        fence_state->state = FENCE_INFLIGHT;
        fence_state->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && semaphore_state->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        semaphore_state->signaled = true;
        semaphore_state->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        IMAGE_STATE *image_state = swapchain_data->images[*pImageIndex].image_state;
        if (image_state) {
            image_state->acquired = true;
            // Propagate the swapchain's shared-presentable property to the image.
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}
5292
5293void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
5294 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
5295 VkResult result) {
5296 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
5297 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
5298}
5299
5300void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
5301 uint32_t *pImageIndex, VkResult result) {
5302 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
5303 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
5304 pAcquireInfo->fence, pImageIndex);
5305}
5306
5307void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
5308 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
5309 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
5310 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
5311 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
5312 phys_device_state.phys_device = pPhysicalDevices[i];
5313 // Init actual features for each physical device
5314 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
5315 }
5316 }
5317}
5318
5319// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
5320static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005321 VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06005322 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
5323
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005324 if (pQueueFamilyProperties) { // Save queue family properties
locke-lunargd556cc32019-09-17 01:21:23 -06005325 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
5326 for (uint32_t i = 0; i < count; ++i) {
5327 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
5328 }
5329 }
5330}
5331
5332void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
5333 uint32_t *pQueueFamilyPropertyCount,
5334 VkQueueFamilyProperties *pQueueFamilyProperties) {
5335 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5336 assert(physical_device_state);
Mike Schuchardt2df08912020-12-15 16:28:09 -08005337 VkQueueFamilyProperties2 *pqfp = nullptr;
5338 std::vector<VkQueueFamilyProperties2> qfp;
locke-lunargd556cc32019-09-17 01:21:23 -06005339 qfp.resize(*pQueueFamilyPropertyCount);
5340 if (pQueueFamilyProperties) {
5341 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06005342 qfp[i] = LvlInitStruct<VkQueueFamilyProperties2>();
locke-lunargd556cc32019-09-17 01:21:23 -06005343 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
5344 }
5345 pqfp = qfp.data();
5346 }
5347 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
5348}
5349
5350void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005351 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06005352 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5353 assert(physical_device_state);
5354 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
5355 pQueueFamilyProperties);
5356}
5357
5358void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005359 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06005360 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5361 assert(physical_device_state);
5362 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
5363 pQueueFamilyProperties);
5364}
5365void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
5366 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005367 if (!surface) return;
5368 auto surface_state = GetSurfaceState(surface);
5369 surface_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005370 surface_map.erase(surface);
5371}
5372
5373void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005374 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06005375}
5376
5377void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
5378 const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
5379 const VkAllocationCallbacks *pAllocator,
5380 VkSurfaceKHR *pSurface, VkResult result) {
5381 if (VK_SUCCESS != result) return;
5382 RecordVulkanSurface(pSurface);
5383}
5384
5385#ifdef VK_USE_PLATFORM_ANDROID_KHR
5386void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
5387 const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
5388 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5389 VkResult result) {
5390 if (VK_SUCCESS != result) return;
5391 RecordVulkanSurface(pSurface);
5392}
5393#endif // VK_USE_PLATFORM_ANDROID_KHR
5394
5395#ifdef VK_USE_PLATFORM_IOS_MVK
5396void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
5397 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5398 VkResult result) {
5399 if (VK_SUCCESS != result) return;
5400 RecordVulkanSurface(pSurface);
5401}
5402#endif // VK_USE_PLATFORM_IOS_MVK
5403
5404#ifdef VK_USE_PLATFORM_MACOS_MVK
5405void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
5406 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
5407 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5408 VkResult result) {
5409 if (VK_SUCCESS != result) return;
5410 RecordVulkanSurface(pSurface);
5411}
5412#endif // VK_USE_PLATFORM_MACOS_MVK
5413
Jeremy Kniagerf33a67c2019-12-09 09:44:39 -07005414#ifdef VK_USE_PLATFORM_METAL_EXT
5415void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
5416 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
5417 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5418 VkResult result) {
5419 if (VK_SUCCESS != result) return;
5420 RecordVulkanSurface(pSurface);
5421}
5422#endif // VK_USE_PLATFORM_METAL_EXT
5423
locke-lunargd556cc32019-09-17 01:21:23 -06005424#ifdef VK_USE_PLATFORM_WAYLAND_KHR
5425void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
5426 const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
5427 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5428 VkResult result) {
5429 if (VK_SUCCESS != result) return;
5430 RecordVulkanSurface(pSurface);
5431}
5432#endif // VK_USE_PLATFORM_WAYLAND_KHR
5433
5434#ifdef VK_USE_PLATFORM_WIN32_KHR
5435void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
5436 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
5437 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5438 VkResult result) {
5439 if (VK_SUCCESS != result) return;
5440 RecordVulkanSurface(pSurface);
5441}
5442#endif // VK_USE_PLATFORM_WIN32_KHR
5443
5444#ifdef VK_USE_PLATFORM_XCB_KHR
5445void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
5446 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5447 VkResult result) {
5448 if (VK_SUCCESS != result) return;
5449 RecordVulkanSurface(pSurface);
5450}
5451#endif // VK_USE_PLATFORM_XCB_KHR
5452
5453#ifdef VK_USE_PLATFORM_XLIB_KHR
5454void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
5455 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5456 VkResult result) {
5457 if (VK_SUCCESS != result) return;
5458 RecordVulkanSurface(pSurface);
5459}
5460#endif // VK_USE_PLATFORM_XLIB_KHR
5461
Niklas Haas8b84af12020-04-19 22:20:11 +02005462void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
5463 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
5464 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5465 VkResult result) {
5466 if (VK_SUCCESS != result) return;
5467 RecordVulkanSurface(pSurface);
5468}
5469
Cort23cf2282019-09-20 18:58:18 +02005470void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005471 VkPhysicalDeviceFeatures *pFeatures) {
5472 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Yilong Li358152a2020-07-08 02:16:45 -07005473 // Reset the features2 safe struct before setting up the features field.
5474 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02005475 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02005476}
5477
5478void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005479 VkPhysicalDeviceFeatures2 *pFeatures) {
5480 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02005481 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005482}
5483
5484void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005485 VkPhysicalDeviceFeatures2 *pFeatures) {
5486 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02005487 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005488}
5489
locke-lunargd556cc32019-09-17 01:21:23 -06005490void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
5491 VkSurfaceKHR surface,
5492 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
5493 VkResult result) {
5494 if (VK_SUCCESS != result) return;
5495 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005496 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005497
5498 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5499 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005500}
5501
5502void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
5503 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5504 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
5505 if (VK_SUCCESS != result) return;
5506 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005507 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005508
5509 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5510 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005511}
5512
5513void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
5514 VkSurfaceKHR surface,
5515 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
5516 VkResult result) {
5517 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005518 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
5519 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
5520 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
5521 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
5522 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
5523 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
5524 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
5525 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
5526 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
5527 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005528
5529 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5530 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005531}
5532
5533void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
5534 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
5535 VkBool32 *pSupported, VkResult result) {
5536 if (VK_SUCCESS != result) return;
5537 auto surface_state = GetSurfaceState(surface);
5538 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
5539}
5540
5541void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
5542 VkSurfaceKHR surface,
5543 uint32_t *pPresentModeCount,
5544 VkPresentModeKHR *pPresentModes,
5545 VkResult result) {
5546 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5547
5548 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
5549 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005550
5551 if (*pPresentModeCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005552 if (*pPresentModeCount > physical_device_state->present_modes.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06005553 physical_device_state->present_modes.resize(*pPresentModeCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005554 }
locke-lunargd556cc32019-09-17 01:21:23 -06005555 }
5556 if (pPresentModes) {
locke-lunargd556cc32019-09-17 01:21:23 -06005557 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
5558 physical_device_state->present_modes[i] = pPresentModes[i];
5559 }
5560 }
5561}
5562
5563void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
5564 uint32_t *pSurfaceFormatCount,
5565 VkSurfaceFormatKHR *pSurfaceFormats,
5566 VkResult result) {
5567 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5568
5569 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005570
5571 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005572 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06005573 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005574 }
locke-lunargd556cc32019-09-17 01:21:23 -06005575 }
5576 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06005577 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
5578 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
5579 }
5580 }
5581}
5582
5583void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
5584 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5585 uint32_t *pSurfaceFormatCount,
5586 VkSurfaceFormat2KHR *pSurfaceFormats,
5587 VkResult result) {
5588 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5589
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005590 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005591 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005592 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
5593 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
5594 }
locke-lunargd556cc32019-09-17 01:21:23 -06005595 }
5596 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06005597 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005598 physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
locke-lunargd556cc32019-09-17 01:21:23 -06005599 }
5600 }
5601}
5602
// vkCmdBeginDebugUtilsLabelEXT: forward the label-begin to the debug-utils label tracker.
void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}
5607
// vkCmdEndDebugUtilsLabelEXT: forward the label-end to the debug-utils label tracker.
void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}
5611
5612void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
5613 const VkDebugUtilsLabelEXT *pLabelInfo) {
5614 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
5615
5616 // Squirrel away an easily accessible copy.
5617 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5618 cb_state->debug_label = LoggingLabel(pLabelInfo);
5619}
5620
5621void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005622 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06005623 if (NULL != pPhysicalDeviceGroupProperties) {
5624 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
5625 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
5626 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
5627 auto &phys_device_state = physical_device_map[cur_phys_dev];
5628 phys_device_state.phys_device = cur_phys_dev;
5629 // Init actual features for each physical device
5630 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
5631 }
5632 }
5633 }
5634}
5635
5636void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005637 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06005638 VkResult result) {
5639 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5640 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5641}
5642
5643void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005644 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06005645 VkResult result) {
5646 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5647 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5648}
5649
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005650void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
5651 uint32_t queueFamilyIndex,
5652 uint32_t *pCounterCount,
5653 VkPerformanceCounterKHR *pCounters) {
5654 if (NULL == pCounters) return;
5655
5656 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5657 assert(physical_device_state);
5658
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005659 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queue_family_counters(new QUEUE_FAMILY_PERF_COUNTERS());
5660 queue_family_counters->counters.resize(*pCounterCount);
5661 for (uint32_t i = 0; i < *pCounterCount; i++) queue_family_counters->counters[i] = pCounters[i];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005662
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005663 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queue_family_counters);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005664}
5665
5666void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
5667 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
5668 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
5669 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5670 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
5671}
5672
5673void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
5674 VkResult result) {
5675 if (result == VK_SUCCESS) performance_lock_acquired = true;
5676}
5677
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005678void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
5679 performance_lock_acquired = false;
5680 for (auto &cmd_buffer : commandBufferMap) {
5681 cmd_buffer.second->performance_lock_released = true;
5682 }
5683}
5684
locke-lunargd556cc32019-09-17 01:21:23 -06005685void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005686 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06005687 const VkAllocationCallbacks *pAllocator) {
5688 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005689 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5690 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005691 desc_template_map.erase(descriptorUpdateTemplate);
5692}
5693
5694void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005695 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06005696 const VkAllocationCallbacks *pAllocator) {
5697 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005698 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5699 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005700 desc_template_map.erase(descriptorUpdateTemplate);
5701}
5702
Mike Schuchardt2df08912020-12-15 16:28:09 -08005703void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
5704 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) {
locke-lunargd556cc32019-09-17 01:21:23 -06005705 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005706 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06005707 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
5708}
5709
Mike Schuchardt2df08912020-12-15 16:28:09 -08005710void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device,
5711 const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
5712 const VkAllocationCallbacks *pAllocator,
5713 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate,
5714 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005715 if (VK_SUCCESS != result) return;
5716 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5717}
5718
5719void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005720 VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
5721 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005722 if (VK_SUCCESS != result) return;
5723 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5724}
5725
// Shared recording for vkUpdateDescriptorSetWithTemplate(KHR): apply a template-driven
// update (pData interpreted per the template layout) to the tracked descriptor set.
void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        // Unknown template handle -- should be unreachable for a handle that passed validation.
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}
5740
// vkUpdateDescriptorSetWithTemplate: delegate to the shared template-update recorder.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5746
// vkUpdateDescriptorSetWithTemplateKHR: delegate to the shared template-update recorder.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5752
// vkCmdPushDescriptorSetWithTemplateKHR: decode the template blob into equivalent
// VkWriteDescriptorSet updates and record them as a push-descriptor update on the command buffer.
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
                                                                              VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                              VkPipelineLayout layout, uint32_t set,
                                                                              const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        // Resolve the descriptor set layout at index `set` within the pipeline layout.
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}
5774
5775void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
5776 uint32_t *pPropertyCount, void *pProperties) {
5777 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5778 if (*pPropertyCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005779 physical_device_state->display_plane_property_count = *pPropertyCount;
5780 }
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005781 if (*pPropertyCount || pProperties) {
5782 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005783 }
5784}
5785
5786void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
5787 uint32_t *pPropertyCount,
5788 VkDisplayPlanePropertiesKHR *pProperties,
5789 VkResult result) {
5790 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5791 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5792}
5793
5794void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
5795 uint32_t *pPropertyCount,
5796 VkDisplayPlaneProperties2KHR *pProperties,
5797 VkResult result) {
5798 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5799 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5800}
5801
5802void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5803 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
5804 QueryObject query_obj = {queryPool, query, index};
5805 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5806 RecordCmdBeginQuery(cb_state, query_obj);
5807}
5808
5809void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5810 uint32_t query, uint32_t index) {
5811 QueryObject query_obj = {queryPool, query, index};
5812 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5813 RecordCmdEndQuery(cb_state, query_obj);
5814}
5815
// Shared recording for vkCreateSamplerYcbcrConversion(KHR): build and register tracking
// state for a new sampler Y'CbCr conversion object.
void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();

    // Android hardware buffers may supply an external format; let the AHB path fill in state first.
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
    }

    const VkFormat conversion_format = create_info->format;

    if (conversion_format != VK_FORMAT_UNDEFINED) {
        // If format is VK_FORMAT_UNDEFINED, format_features will have been set by external AHB features above
        ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
    }

    ycbcr_state->chromaFilter = create_info->chromaFilter;
    ycbcr_state->format = conversion_format;
    samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
}
5835
5836void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
5837 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5838 const VkAllocationCallbacks *pAllocator,
5839 VkSamplerYcbcrConversion *pYcbcrConversion,
5840 VkResult result) {
5841 if (VK_SUCCESS != result) return;
5842 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5843}
5844
5845void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
5846 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5847 const VkAllocationCallbacks *pAllocator,
5848 VkSamplerYcbcrConversion *pYcbcrConversion,
5849 VkResult result) {
5850 if (VK_SUCCESS != result) return;
5851 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5852}
5853
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005854void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
5855 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
5856 RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
5857 }
5858
5859 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
5860 ycbcr_state->destroyed = true;
5861 samplerYcbcrConversionMap.erase(ycbcr_conversion);
5862}
5863
locke-lunargd556cc32019-09-17 01:21:23 -06005864void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
5865 const VkAllocationCallbacks *pAllocator) {
5866 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005867 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005868}
5869
5870void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
5871 VkSamplerYcbcrConversion ycbcrConversion,
5872 const VkAllocationCallbacks *pAllocator) {
5873 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005874 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005875}
5876
// Shared recording for vkResetQueryPool / vkResetQueryPoolEXT (host query reset).
// Marks every query in [firstQuery, firstQuery + queryCount) as QUERYSTATE_RESET,
// including one entry per performance pass for performance-query pools.
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    // Clamp to the pool's actual size so out-of-range queries are not invented here.
    // NOTE(review): assumes firstQuery <= createInfo.queryCount (the subtraction is
    // unsigned) — presumably guaranteed by validation before this record step; verify.
    QueryObject query_obj{queryPool, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            // Performance queries keep per-pass state keyed by QueryObject::perf_pass.
            for (uint32_t pass_index = 0; pass_index < query_pool_state->n_performance_passes; pass_index++) {
                query_obj.perf_pass = pass_index;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}
5900
// EXT entry point for host query reset; forwards to the shared recorder.
void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5905
// Core (Vulkan 1.2) entry point for host query reset; forwards to the shared recorder.
void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5910
locke-lunargd556cc32019-09-17 01:21:23 -06005911void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
5912 const TEMPLATE_STATE *template_state, const void *pData) {
5913 // Translate the templated update into a normal update for validation...
5914 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
5915 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
5916 decoded_update.desc_writes.data(), 0, NULL);
5917}
5918
5919// Update the common AllocateDescriptorSetsData
5920void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005921 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005922 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005923 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005924 if (layout) {
5925 ds_data->layout_nodes[i] = layout;
5926 // Count total descriptors required per type
5927 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5928 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005929 uint32_t type_index = static_cast<uint32_t>(binding_layout->descriptorType);
5930 ds_data->required_descriptors_by_type[type_index] += binding_layout->descriptorCount;
locke-lunargd556cc32019-09-17 01:21:23 -06005931 }
5932 }
5933 // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
5934 }
5935}
5936
5937// Decrement allocated sets from the pool and insert new sets into set_map
5938void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
5939 const VkDescriptorSet *descriptor_sets,
5940 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
5941 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
5942 // Account for sets and individual descriptors allocated from pool
5943 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
5944 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
5945 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
5946 }
5947
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07005948 const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(p_alloc_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06005949 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
5950
5951 // Create tracking object for each descriptor set; insert into global map and the pool's set.
5952 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
5953 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
5954
Jeff Bolz41a1ced2019-10-11 11:40:49 -05005955 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07005956 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06005957 pool_state->sets.insert(new_ds.get());
5958 new_ds->in_use.store(0);
5959 setMap[descriptor_sets[i]] = std::move(new_ds);
5960 }
5961}
5962
// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions.
// Note: hasDispatchCmd is set here for draws as well, since UpdateStateCmdDrawType routes
// through this function; draw-specific state (hasDrawCmd) is layered on by the caller.
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
                                                            VkPipelineBindPoint bind_point, const char *function) {
    UpdateDrawState(cb_state, cmd_type, bind_point, function);
    cb_state->hasDispatchCmd = true;
}
5969
// Generic function to handle state update for all CmdDraw* type functions.
// Performs the shared draw/dispatch bookkeeping first, then marks the command
// buffer as containing a draw.
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
                                                    const char *function) {
    UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
    cb_state->hasDrawCmd = true;
}
5976
5977void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5978 uint32_t firstVertex, uint32_t firstInstance) {
5979 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005980 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
locke-lunargd556cc32019-09-17 01:21:23 -06005981}
5982
5983void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5984 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5985 uint32_t firstInstance) {
5986 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005987 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
locke-lunargd556cc32019-09-17 01:21:23 -06005988}
5989
5990void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5991 uint32_t count, uint32_t stride) {
5992 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5993 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005994 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005995 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5996}
5997
5998void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5999 VkDeviceSize offset, uint32_t count, uint32_t stride) {
6000 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6001 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006002 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06006003 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6004}
6005
6006void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
6007 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006008 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
locke-lunargd556cc32019-09-17 01:21:23 -06006009}
6010
6011void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
6012 VkDeviceSize offset) {
6013 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006014 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06006015 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6016 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6017}
6018
Tony-LunarG977448c2019-12-02 14:52:02 -07006019void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
6020 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
locke-lunarg540b2252020-08-03 13:23:36 -06006021 uint32_t stride, const char *function) {
Tony-LunarG977448c2019-12-02 14:52:02 -07006022 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6023 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6024 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006025 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Tony-LunarG977448c2019-12-02 14:52:02 -07006026 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6027 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
6028}
6029
// KHR entry point; forwards to the shared indirect-count recorder with the
// KHR-specific function name for error reporting.
void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                               "vkCmdDrawIndirectCountKHR()");
}
6037
// Core (Vulkan 1.2) entry point; forwards to the shared indirect-count recorder
// with the core function name for error reporting.
void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                               "vkCmdDrawIndirectCount()");
}
6044
6045void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
6046 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
locke-lunarg540b2252020-08-03 13:23:36 -06006047 uint32_t maxDrawCount, uint32_t stride, const char *function) {
locke-lunargd556cc32019-09-17 01:21:23 -06006048 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6049 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6050 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006051 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
locke-lunargd556cc32019-09-17 01:21:23 -06006052 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6053 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
6054}
6055
// KHR entry point; forwards to the shared indexed-indirect-count recorder with
// the KHR-specific function name for error reporting.
void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                                      "vkCmdDrawIndexedIndirectCountKHR()");
}
6063
// Core (Vulkan 1.2) entry point; forwards to the shared indexed-indirect-count
// recorder with the core function name for error reporting.
void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                      VkDeviceSize offset, VkBuffer countBuffer,
                                                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                      uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                                      "vkCmdDrawIndexedIndirectCount()");
}
6071
6072void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
6073 uint32_t firstTask) {
6074 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006075 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06006076}
6077
6078void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
6079 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
6080 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006081 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
6082 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06006083 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6084 if (buffer_state) {
6085 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6086 }
6087}
6088
6089void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
6090 VkDeviceSize offset, VkBuffer countBuffer,
6091 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
6092 uint32_t stride) {
6093 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6094 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6095 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006096 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
6097 "vkCmdDrawMeshTasksIndirectCountNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06006098 if (buffer_state) {
6099 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6100 }
6101 if (count_buffer_state) {
6102 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
6103 }
6104}
6105
6106void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
6107 const VkAllocationCallbacks *pAllocator,
6108 VkShaderModule *pShaderModule, VkResult result,
6109 void *csm_state_data) {
6110 if (VK_SUCCESS != result) return;
6111 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
6112
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06006113 spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
locke-lunargd556cc32019-09-17 01:21:23 -06006114 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05006115 auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
6116 csm_state->unique_shader_id)
6117 : std::make_shared<SHADER_MODULE_STATE>();
sfricke-samsung962cad92021-04-13 00:46:29 -07006118 new_shader_module->SetPushConstantUsedInShader();
locke-lunargd556cc32019-09-17 01:21:23 -06006119 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
6120}
6121
// Populate per-stage state from a pipeline's shader stage create info: resolve the
// module, walk its descriptor interface, and accumulate descriptor requirements
// (writability, atomics, sampler usage patterns) into the pipeline's active_slots.
void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    stage_state->entry_point_name = pStage->pName;
    stage_state->shader_state = GetShared<SHADER_MODULE_STATE>(pStage->module);
    auto module = stage_state->shader_state.get();
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = module->FindEntrypoint(pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    stage_state->stage_flag = pStage->stage;

    // Mark accessible ids reachable from the entrypoint, and let the module record
    // any execution modes onto the pipeline.
    stage_state->accessible_ids = module->MarkAccessibleIds(entrypoint);
    module->ProcessExecutionModes(entrypoint, pipeline);

    stage_state->descriptor_uses = module->CollectInterfaceByDescriptorSlot(
        stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
    // Capture descriptor uses for the pipeline.
    // use.first is a (set, binding) pair; use.second describes how the shader uses it.
    for (const auto &use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
        pipeline->active_slots[slot][use.first.second].is_writable |= use.second.is_writable;
        // Fold this stage's requirement flags into any already recorded by other stages.
        auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
        reqs = descriptor_req(reqs | module->DescriptorTypeToReqs(use.second.type_id));
        if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
        if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
        if (use.second.is_sampler_bias_offset) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET);

        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
        if (use.second.samplers_used_by_image.size()) {
            // Grow (never shrink) the per-image sampler list, then record each sampler
            // with a null state pointer to be resolved later.
            auto &samplers_used_by_image = pipeline->active_slots[slot][use.first.second].samplers_used_by_image;
            if (use.second.samplers_used_by_image.size() > samplers_used_by_image.size()) {
                samplers_used_by_image.resize(use.second.samplers_used_by_image.size());
            }
            uint32_t image_index = 0;
            for (const auto &samplers : use.second.samplers_used_by_image) {
                for (const auto &sampler : samplers) {
                    samplers_used_by_image[image_index].emplace(sampler, nullptr);
                }
                ++image_index;
            }
        }
    }

    // Fragment stages additionally record which output locations the shader writes.
    if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
        pipeline->fragmentShader_writable_output_location_list = module->CollectWritableOutputLocationinFS(*pStage);
    }
}
6173
// Discussed in details in https://github.com/KhronosGroup/Vulkan-Docs/issues/1081
// Internal discussion and CTS were written to prove that this is not called after an incompatible vkCmdBindPipeline
// "Binding a pipeline with a layout that is not compatible with the push constant layout does not disturb the push constant values"
//
// vkCmdBindDescriptorSet has nothing to do with push constants and don't need to call this after neither
//
// Part of this assumes apps at draw/dispath/traceRays/etc time will have it properly compatabile or else other VU will be triggered
//
// When the bound layout's push-constant ranges differ from what the command buffer
// last saw, this rebuilds cb_state's push-constant shadow storage: per-stage byte
// vectors marked PC_Byte_Not_Set below each range's offset and PC_Byte_Not_Updated
// inside the range, plus a zeroed data buffer sized to cover all ranges.
void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    // Only reset when the range set actually changed; identical layouts keep values.
    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        cb_state->push_constant_data_update.clear();
        uint32_t size_needed = 0;
        for (const auto &push_constant_range : *cb_state->push_constant_data_ranges) {
            auto size = push_constant_range.offset + push_constant_range.size;
            size_needed = std::max(size_needed, size);

            // Walk the stageFlags bitmask one bit at a time; each set bit is one
            // VkShaderStageFlagBits that owns (part of) this range.
            auto stage_flags = push_constant_range.stageFlags;
            uint32_t bit_shift = 0;
            while (stage_flags) {
                if (stage_flags & 1) {
                    VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
                    const auto it = cb_state->push_constant_data_update.find(flag);

                    if (it != cb_state->push_constant_data_update.end()) {
                        // Stage already has a vector: extend it. Bytes before the range's
                        // offset are "not set", bytes within the range are "not updated".
                        if (it->second.size() < push_constant_range.offset) {
                            it->second.resize(push_constant_range.offset, PC_Byte_Not_Set);
                        }
                        if (it->second.size() < size) {
                            it->second.resize(size, PC_Byte_Not_Updated);
                        }
                    } else {
                        // First range seen for this stage: build its vector from scratch.
                        std::vector<uint8_t> bytes;
                        bytes.resize(push_constant_range.offset, PC_Byte_Not_Set);
                        bytes.resize(size, PC_Byte_Not_Updated);
                        cb_state->push_constant_data_update[flag] = bytes;
                    }
                }
                stage_flags = stage_flags >> 1;
                ++bit_shift;
            }
        }
        // Backing storage for the actual push-constant bytes, zero-initialized.
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}
John Zulauf22b0fbe2019-10-15 06:26:16 -06006228
// Record IMAGE_STATE for swapchain images the app just retrieved. Each image gets a
// synthesized VkImageCreateInfo mirroring the swapchain's create info, a subresource
// range encoder, and a "fake" base address shared by all images bound to the same
// swapchain slot (used by sync validation for aliasing).
void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            SWAPCHAIN_IMAGE &swapchain_image = swapchain_state->images[i];
            if (swapchain_image.image_state) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image, reconstructing the
            // create info the implementation effectively used for the image.
            auto image_ci = LvlInitStruct<VkImageCreateInfo>();
            image_ci.pNext = LvlFindInChain<VkImageFormatListCreateInfo>(swapchain_state->createInfo.pNext);
            image_ci.flags = 0;  // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            // Map swapchain create flags onto the corresponding image create flags.
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR) {
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            }
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR) {
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            }
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT);
            }

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
            auto *image_state = imageMap[pSwapchainImages[i]].get();
            assert(image_state);
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            image_state->unprotected = ((image_ci.flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);

            // Since swapchains can't be linear, we can create an encoder here, and SyncVal needs a fake_base_address
            image_state->fragment_encoder = std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(
                new subresource_adapter::ImageRangeEncoder(*image_state));

            if (swapchain_image.bound_images.empty()) {
                // First time "bind" allocates
                image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
            } else {
                // All others reuse
                image_state->swapchain_fake_address = (*swapchain_image.bound_images.cbegin())->swapchain_fake_address;
                // Since there are others, need to update the aliasing information
                AddAliasingImage(image_state, &swapchain_image.bound_images);
            }

            swapchain_image.image_state = image_state;  // Don't move, it's already a reference to the imageMap
            swapchain_image.bound_images.emplace(image_state);

            AddImageStateProps(*image_state, device, physical_device);
        }
    }

    // Remember how many images the app has actually observed (used by later validation).
    if (*pSwapchainImageCount) {
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}
sourav parmar35e7a002020-06-09 17:58:44 -07006306
sourav parmar35e7a002020-06-09 17:58:44 -07006307void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
6308 const VkCopyAccelerationStructureInfoKHR *pInfo) {
6309 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6310 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07006311 ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfo->src);
6312 ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfo->dst);
sourav parmar35e7a002020-06-09 17:58:44 -07006313 if (dst_as_state != nullptr && src_as_state != nullptr) {
6314 dst_as_state->built = true;
6315 dst_as_state->build_info_khr = src_as_state->build_info_khr;
6316 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
6317 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
6318 }
6319 }
6320}
Piers Daniell39842ee2020-07-10 16:42:33 -06006321
6322void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
6323 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6324 cb_state->status |= CBSTATUS_CULL_MODE_SET;
6325 cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
6326}
6327
6328void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
6329 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6330 cb_state->status |= CBSTATUS_FRONT_FACE_SET;
6331 cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
6332}
6333
6334void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
6335 VkPrimitiveTopology primitiveTopology) {
6336 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6337 cb_state->primitiveTopology = primitiveTopology;
6338 cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
6339 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
6340}
6341
6342void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
6343 const VkViewport *pViewports) {
6344 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6345 cb_state->viewportWithCountMask |= (1u << viewportCount) - 1u;
Tobias Hector6663c9b2020-11-05 10:18:02 +00006346 cb_state->viewportWithCountCount = viewportCount;
Piers Daniell39842ee2020-07-10 16:42:33 -06006347 cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
6348 cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
6349}
6350
6351void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
6352 const VkRect2D *pScissors) {
6353 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6354 cb_state->scissorWithCountMask |= (1u << scissorCount) - 1u;
6355 cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
6356 cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
6357}
6358
6359void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
6360 uint32_t bindingCount, const VkBuffer *pBuffers,
6361 const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
6362 const VkDeviceSize *pStrides) {
6363 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6364 if (pStrides) {
6365 cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
6366 cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
6367 }
6368
6369 uint32_t end = firstBinding + bindingCount;
6370 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
6371 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
6372 }
6373
6374 for (uint32_t i = 0; i < bindingCount; ++i) {
6375 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07006376 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
Piers Daniell39842ee2020-07-10 16:42:33 -06006377 vertex_buffer_binding.offset = pOffsets[i];
6378 vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
6379 vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
6380 // Add binding for this vertex buffer to this commandbuffer
6381 if (pBuffers[i]) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07006382 AddCommandBufferBindingBuffer(cb_state, vertex_buffer_binding.buffer_state.get());
Piers Daniell39842ee2020-07-10 16:42:33 -06006383 }
6384 }
6385}
6386
6387void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
6388 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6389 cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
6390 cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
6391}
6392
6393void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
6394 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6395 cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
6396 cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
6397}
6398
6399void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
6400 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6401 cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
6402 cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
6403}
6404
6405void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
6406 VkBool32 depthBoundsTestEnable) {
6407 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6408 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
6409 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
6410}
6411void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
6412 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6413 cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
6414 cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
6415}
6416
6417void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
6418 VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
6419 VkCompareOp compareOp) {
6420 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6421 cb_state->status |= CBSTATUS_STENCIL_OP_SET;
6422 cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
6423}
locke-lunarg4189aa22020-10-21 00:23:48 -06006424
6425void ValidationStateTracker::PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
6426 uint32_t discardRectangleCount,
6427 const VkRect2D *pDiscardRectangles) {
6428 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6429 cb_state->status |= CBSTATUS_DISCARD_RECTANGLE_SET;
6430 cb_state->static_status &= ~CBSTATUS_DISCARD_RECTANGLE_SET;
6431}
6432
6433void ValidationStateTracker::PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
6434 const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
6435 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6436 cb_state->status |= CBSTATUS_SAMPLE_LOCATIONS_SET;
6437 cb_state->static_status &= ~CBSTATUS_SAMPLE_LOCATIONS_SET;
6438}
6439
6440void ValidationStateTracker::PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer,
6441 VkCoarseSampleOrderTypeNV sampleOrderType,
6442 uint32_t customSampleOrderCount,
6443 const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) {
6444 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6445 cb_state->status |= CBSTATUS_COARSE_SAMPLE_ORDER_SET;
6446 cb_state->static_status &= ~CBSTATUS_COARSE_SAMPLE_ORDER_SET;
6447}