blob: ce730c4daf51a431cef113c0115fe728929da525 [file] [log] [blame]
sfricke-samsung486a51e2021-01-02 00:10:15 -08001/* Copyright (c) 2015-2021 The Khronos Group Inc.
2 * Copyright (c) 2015-2021 Valve Corporation
3 * Copyright (c) 2015-2021 LunarG, Inc.
4 * Copyright (C) 2015-2021 Google Inc.
Tobias Hector6663c9b2020-11-05 10:18:02 +00005 * Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
locke-lunargd556cc32019-09-17 01:21:23 -06006 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 * Author: Mark Lobodzinski <mark@lunarg.com>
20 * Author: Dave Houlton <daveh@lunarg.com>
21 * Shannon McPherson <shannon@lunarg.com>
Tobias Hector6663c9b2020-11-05 10:18:02 +000022 * Author: Tobias Hector <tobias.hector@amd.com>
locke-lunargd556cc32019-09-17 01:21:23 -060023 */
24
David Zhao Akeley44139b12021-04-26 16:16:13 -070025#include <algorithm>
locke-lunargd556cc32019-09-17 01:21:23 -060026#include <cmath>
locke-lunargd556cc32019-09-17 01:21:23 -060027
28#include "vk_enum_string_helper.h"
29#include "vk_format_utils.h"
30#include "vk_layer_data.h"
31#include "vk_layer_utils.h"
32#include "vk_layer_logging.h"
33#include "vk_typemap_helper.h"
34
35#include "chassis.h"
36#include "state_tracker.h"
37#include "shader_validation.h"
Jeremy Gebben74aa7622020-12-15 11:18:00 -070038#include "sync_utils.h"
locke-lunargd556cc32019-09-17 01:21:23 -060039
// Returns the human-readable name of a command (e.g. "vkCmdDraw") for use in
// validation messages. Indexes directly into the generated name table, so the
// caller must pass a valid CMD_TYPE value.
const char *CommandTypeString(CMD_TYPE type) {
    // Autogenerated as part of the command_validation.h codegen
    return kGeneratedCommandNameList[type];
}
44
John Zulaufd0ec59f2021-03-13 14:25:08 -070045uint32_t GetSubpassDepthStencilAttachmentIndex(const safe_VkPipelineDepthStencilStateCreateInfo *pipe_ds_ci,
46 const safe_VkAttachmentReference2 *depth_stencil_ref) {
47 uint32_t depth_stencil_attachment = VK_ATTACHMENT_UNUSED;
48 if (pipe_ds_ci && depth_stencil_ref) {
49 depth_stencil_attachment = depth_stencil_ref->attachment;
50 }
51 return depth_stencil_attachment;
52}
53
locke-lunarg4189aa22020-10-21 00:23:48 -060054VkDynamicState ConvertToDynamicState(CBStatusFlagBits flag) {
55 switch (flag) {
56 case CBSTATUS_LINE_WIDTH_SET:
57 return VK_DYNAMIC_STATE_LINE_WIDTH;
58 case CBSTATUS_DEPTH_BIAS_SET:
59 return VK_DYNAMIC_STATE_DEPTH_BIAS;
60 case CBSTATUS_BLEND_CONSTANTS_SET:
61 return VK_DYNAMIC_STATE_BLEND_CONSTANTS;
62 case CBSTATUS_DEPTH_BOUNDS_SET:
63 return VK_DYNAMIC_STATE_DEPTH_BOUNDS;
64 case CBSTATUS_STENCIL_READ_MASK_SET:
65 return VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK;
66 case CBSTATUS_STENCIL_WRITE_MASK_SET:
67 return VK_DYNAMIC_STATE_STENCIL_WRITE_MASK;
68 case CBSTATUS_STENCIL_REFERENCE_SET:
69 return VK_DYNAMIC_STATE_STENCIL_REFERENCE;
70 case CBSTATUS_VIEWPORT_SET:
71 return VK_DYNAMIC_STATE_VIEWPORT;
72 case CBSTATUS_SCISSOR_SET:
73 return VK_DYNAMIC_STATE_SCISSOR;
74 case CBSTATUS_EXCLUSIVE_SCISSOR_SET:
75 return VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV;
76 case CBSTATUS_SHADING_RATE_PALETTE_SET:
77 return VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV;
78 case CBSTATUS_LINE_STIPPLE_SET:
79 return VK_DYNAMIC_STATE_LINE_STIPPLE_EXT;
80 case CBSTATUS_VIEWPORT_W_SCALING_SET:
81 return VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV;
82 case CBSTATUS_CULL_MODE_SET:
83 return VK_DYNAMIC_STATE_CULL_MODE_EXT;
84 case CBSTATUS_FRONT_FACE_SET:
85 return VK_DYNAMIC_STATE_FRONT_FACE_EXT;
86 case CBSTATUS_PRIMITIVE_TOPOLOGY_SET:
87 return VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT;
88 case CBSTATUS_VIEWPORT_WITH_COUNT_SET:
89 return VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT;
90 case CBSTATUS_SCISSOR_WITH_COUNT_SET:
91 return VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT;
92 case CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET:
93 return VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT;
94 case CBSTATUS_DEPTH_TEST_ENABLE_SET:
95 return VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT;
96 case CBSTATUS_DEPTH_WRITE_ENABLE_SET:
97 return VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT;
98 case CBSTATUS_DEPTH_COMPARE_OP_SET:
99 return VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT;
100 case CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET:
101 return VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT;
102 case CBSTATUS_STENCIL_TEST_ENABLE_SET:
103 return VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT;
104 case CBSTATUS_STENCIL_OP_SET:
105 return VK_DYNAMIC_STATE_STENCIL_OP_EXT;
106 case CBSTATUS_DISCARD_RECTANGLE_SET:
107 return VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT;
108 case CBSTATUS_SAMPLE_LOCATIONS_SET:
109 return VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT;
110 case CBSTATUS_COARSE_SAMPLE_ORDER_SET:
111 return VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV;
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -0700112 case CBSTATUS_PATCH_CONTROL_POINTS_SET:
113 return VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT;
114 case CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET:
115 return VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT;
116 case CBSTATUS_DEPTH_BIAS_ENABLE_SET:
117 return VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT;
118 case CBSTATUS_LOGIC_OP_SET:
119 return VK_DYNAMIC_STATE_LOGIC_OP_EXT;
120 case CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET:
121 return VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT;
locke-lunarg4189aa22020-10-21 00:23:48 -0600122 default:
123 // CBSTATUS_INDEX_BUFFER_BOUND is not in VkDynamicState
124 return VK_DYNAMIC_STATE_MAX_ENUM;
125 }
126 return VK_DYNAMIC_STATE_MAX_ENUM;
127}
128
129CBStatusFlagBits ConvertToCBStatusFlagBits(VkDynamicState state) {
130 switch (state) {
131 case VK_DYNAMIC_STATE_VIEWPORT:
132 return CBSTATUS_VIEWPORT_SET;
133 case VK_DYNAMIC_STATE_SCISSOR:
134 return CBSTATUS_SCISSOR_SET;
135 case VK_DYNAMIC_STATE_LINE_WIDTH:
136 return CBSTATUS_LINE_WIDTH_SET;
137 case VK_DYNAMIC_STATE_DEPTH_BIAS:
138 return CBSTATUS_DEPTH_BIAS_SET;
139 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
140 return CBSTATUS_BLEND_CONSTANTS_SET;
141 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
142 return CBSTATUS_DEPTH_BOUNDS_SET;
143 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
144 return CBSTATUS_STENCIL_READ_MASK_SET;
145 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
146 return CBSTATUS_STENCIL_WRITE_MASK_SET;
147 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
148 return CBSTATUS_STENCIL_REFERENCE_SET;
149 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
150 return CBSTATUS_VIEWPORT_W_SCALING_SET;
151 case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT:
152 return CBSTATUS_DISCARD_RECTANGLE_SET;
153 case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT:
154 return CBSTATUS_SAMPLE_LOCATIONS_SET;
155 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
156 return CBSTATUS_SHADING_RATE_PALETTE_SET;
157 case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV:
158 return CBSTATUS_COARSE_SAMPLE_ORDER_SET;
159 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
160 return CBSTATUS_EXCLUSIVE_SCISSOR_SET;
161 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
162 return CBSTATUS_LINE_STIPPLE_SET;
163 case VK_DYNAMIC_STATE_CULL_MODE_EXT:
164 return CBSTATUS_CULL_MODE_SET;
165 case VK_DYNAMIC_STATE_FRONT_FACE_EXT:
166 return CBSTATUS_FRONT_FACE_SET;
167 case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT:
168 return CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
169 case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
170 return CBSTATUS_VIEWPORT_WITH_COUNT_SET;
171 case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT:
172 return CBSTATUS_SCISSOR_WITH_COUNT_SET;
173 case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT:
174 return CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
175 case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT:
176 return CBSTATUS_DEPTH_TEST_ENABLE_SET;
177 case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT:
178 return CBSTATUS_DEPTH_WRITE_ENABLE_SET;
179 case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT:
180 return CBSTATUS_DEPTH_COMPARE_OP_SET;
181 case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT:
182 return CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
183 case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT:
184 return CBSTATUS_STENCIL_TEST_ENABLE_SET;
185 case VK_DYNAMIC_STATE_STENCIL_OP_EXT:
186 return CBSTATUS_STENCIL_OP_SET;
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -0700187 case VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT:
188 return CBSTATUS_PATCH_CONTROL_POINTS_SET;
189 case VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT:
190 return CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
191 case VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT:
192 return CBSTATUS_DEPTH_BIAS_ENABLE_SET;
193 case VK_DYNAMIC_STATE_LOGIC_OP_EXT:
194 return CBSTATUS_LOGIC_OP_SET;
195 case VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT:
196 return CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
locke-lunarg4189aa22020-10-21 00:23:48 -0600197 default:
198 return CBSTATUS_NONE;
199 }
200 return CBSTATUS_NONE;
201}
202
Mark Lobodzinskib4ab6ac2020-04-02 13:12:06 -0600203void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
204 if (add_obj) {
205 instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
206 // Call base class
207 ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
208 }
209}
210
John Zulauf5c5e88d2019-12-26 11:22:02 -0700211uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
212 // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
213 uint32_t mip_level_count = range->levelCount;
214 if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
215 mip_level_count = mip_levels - range->baseMipLevel;
216 }
217 return mip_level_count;
218}
219
220uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
221 // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
222 uint32_t array_layer_count = range->layerCount;
223 if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
224 array_layer_count = layers - range->baseArrayLayer;
225 }
226 return array_layer_count;
227}
228
229VkImageSubresourceRange NormalizeSubresourceRange(const VkImageCreateInfo &image_create_info,
230 const VkImageSubresourceRange &range) {
231 VkImageSubresourceRange norm = range;
232 norm.levelCount = ResolveRemainingLevels(&range, image_create_info.mipLevels);
233
Mike Schuchardt2df08912020-12-15 16:28:09 -0800234 // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT flag bit, where <extent.depth> and
John Zulauf5c5e88d2019-12-26 11:22:02 -0700235 // <arrayLayers> can potentially alias.
Mike Schuchardt2df08912020-12-15 16:28:09 -0800236 uint32_t layer_limit = (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT))
John Zulauf5c5e88d2019-12-26 11:22:02 -0700237 ? image_create_info.extent.depth
238 : image_create_info.arrayLayers;
239 norm.layerCount = ResolveRemainingLayers(&range, layer_limit);
240
241 // For multiplanar formats, IMAGE_ASPECT_COLOR is equivalent to adding the aspect of the individual planes
242 VkImageAspectFlags &aspect_mask = norm.aspectMask;
243 if (FormatIsMultiplane(image_create_info.format)) {
244 if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
245 aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
246 aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
247 if (FormatPlaneCount(image_create_info.format) > 2) {
248 aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
249 }
250 }
251 }
252 return norm;
253}
254
255VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range) {
256 const VkImageCreateInfo &image_create_info = image_state.createInfo;
257 return NormalizeSubresourceRange(image_create_info, range);
258}
259
John Zulauf2bc1fde2020-04-24 15:09:51 -0600260// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
261// attachments won't persist past the API entry point exit.
262std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
263 const FRAMEBUFFER_STATE &fb_state) {
264 const VkImageView *attachments = fb_state.createInfo.pAttachments;
265 uint32_t count = fb_state.createInfo.attachmentCount;
266 if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700267 const auto *framebuffer_attachments = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
John Zulauf2bc1fde2020-04-24 15:09:51 -0600268 if (framebuffer_attachments) {
269 attachments = framebuffer_attachments->pAttachments;
270 count = framebuffer_attachments->attachmentCount;
271 }
272 }
273 return std::make_pair(count, attachments);
274}
275
John Zulauf64ffe552021-02-06 10:25:07 -0700276template <typename ImageViewPointer, typename Get>
277std::vector<ImageViewPointer> GetAttachmentViewsImpl(const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state,
278 const Get &get_fn) {
279 std::vector<ImageViewPointer> views;
John Zulauf2bc1fde2020-04-24 15:09:51 -0600280
281 const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
282 const auto attachment_count = count_attachment.first;
283 const auto *attachments = count_attachment.second;
284 views.resize(attachment_count, nullptr);
285 for (uint32_t i = 0; i < attachment_count; i++) {
286 if (attachments[i] != VK_NULL_HANDLE) {
John Zulauf64ffe552021-02-06 10:25:07 -0700287 views[i] = get_fn(attachments[i]);
John Zulauf2bc1fde2020-04-24 15:09:51 -0600288 }
289 }
290 return views;
291}
292
John Zulauf64ffe552021-02-06 10:25:07 -0700293std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetAttachmentViews(const VkRenderPassBeginInfo &rp_begin,
294 const FRAMEBUFFER_STATE &fb_state) const {
295 auto get_fn = [this](VkImageView handle) { return this->Get<IMAGE_VIEW_STATE>(handle); };
296 return GetAttachmentViewsImpl<const IMAGE_VIEW_STATE *>(rp_begin, fb_state, get_fn);
297}
298
299std::vector<std::shared_ptr<const IMAGE_VIEW_STATE>> ValidationStateTracker::GetSharedAttachmentViews(
300 const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state) const {
301 auto get_fn = [this](VkImageView handle) { return this->GetShared<IMAGE_VIEW_STATE>(handle); };
302 return GetAttachmentViewsImpl<std::shared_ptr<const IMAGE_VIEW_STATE>>(rp_begin, fb_state, get_fn);
303}
304
John Zulauf2bc1fde2020-04-24 15:09:51 -0600305std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetCurrentAttachmentViews(const CMD_BUFFER_STATE &cb_state) const {
306 // Only valid *after* RecordBeginRenderPass and *before* RecordEndRenderpass as it relies on cb_state for the renderpass info.
307 std::vector<const IMAGE_VIEW_STATE *> views;
308
locke-lunargaecf2152020-05-12 17:15:41 -0600309 const auto *rp_state = cb_state.activeRenderPass.get();
John Zulauf2bc1fde2020-04-24 15:09:51 -0600310 if (!rp_state) return views;
311 const auto &rp_begin = *cb_state.activeRenderPassBeginInfo.ptr();
312 const auto *fb_state = Get<FRAMEBUFFER_STATE>(rp_begin.framebuffer);
313 if (!fb_state) return views;
314
315 return GetAttachmentViews(rp_begin, *fb_state);
316}
317
locke-lunarg3e127c72020-06-09 17:45:28 -0600318PIPELINE_STATE *GetCurrentPipelineFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint) {
locke-lunargb8d7a7a2020-10-25 16:01:52 -0600319 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
320 return cmd.lastBound[lv_bind_point].pipeline_state;
locke-lunarg3e127c72020-06-09 17:45:28 -0600321}
322
323void GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint,
324 const PIPELINE_STATE **rtn_pipe,
325 const std::vector<LAST_BOUND_STATE::PER_SET> **rtn_sets) {
locke-lunargb8d7a7a2020-10-25 16:01:52 -0600326 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
327 const auto &last_bound_it = cmd.lastBound[lv_bind_point];
328 if (!last_bound_it.IsUsing()) {
locke-lunarg3e127c72020-06-09 17:45:28 -0600329 return;
330 }
locke-lunargb8d7a7a2020-10-25 16:01:52 -0600331 *rtn_pipe = last_bound_it.pipeline_state;
332 *rtn_sets = &(last_bound_it.per_set);
locke-lunarg3e127c72020-06-09 17:45:28 -0600333}
334
locke-lunargd556cc32019-09-17 01:21:23 -0600335#ifdef VK_USE_PLATFORM_ANDROID_KHR
336// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
337// This could also move into a seperate core_validation_android.cpp file... ?
338
339void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700340 const VkExternalMemoryImageCreateInfo *emici = LvlFindInChain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600341 if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
Spencer Fricke224c9852020-04-06 07:47:29 -0700342 is_node->external_ahb = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600343 }
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700344 const VkExternalFormatANDROID *ext_fmt_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600345 if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
346 is_node->has_ahb_format = true;
347 is_node->ahb_format = ext_fmt_android->externalFormat;
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700348 // VUID 01894 will catch if not found in map
349 auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
350 if (it != ahb_ext_formats_map.end()) {
351 is_node->format_features = it->second;
352 }
locke-lunargd556cc32019-09-17 01:21:23 -0600353 }
354}
355
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700356void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700357 const VkExternalMemoryBufferCreateInfo *embci = LvlFindInChain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700358 if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
359 bs_node->external_ahb = true;
360 }
361}
362
locke-lunargd556cc32019-09-17 01:21:23 -0600363void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700364 VkSamplerYcbcrConversion ycbcr_conversion,
365 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700366 const VkExternalFormatANDROID *ext_format_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600367 if (ext_format_android && (0 != ext_format_android->externalFormat)) {
368 ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700369 // VUID 01894 will catch if not found in map
370 auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
371 if (it != ahb_ext_formats_map.end()) {
372 ycbcr_state->format_features = it->second;
373 }
locke-lunargd556cc32019-09-17 01:21:23 -0600374 }
375};
376
377void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
378 ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
379};
380
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700381void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
382 VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
383 if (VK_SUCCESS != result) return;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700384 auto ahb_format_props = LvlFindInChain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700385 if (ahb_format_props) {
Jeremy Gebbenfc6f8152021-03-18 16:58:55 -0600386 ahb_ext_formats_map.emplace(ahb_format_props->externalFormat, ahb_format_props->formatFeatures);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700387 }
388}
389
locke-lunargd556cc32019-09-17 01:21:23 -0600390#else
391
// Non-Android build: AHB image tracking is a no-op.
void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
393
// Non-Android build: AHB buffer tracking is a no-op.
void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}
395
locke-lunargd556cc32019-09-17 01:21:23 -0600396void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700397 VkSamplerYcbcrConversion ycbcr_conversion,
398 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state){};
locke-lunargd556cc32019-09-17 01:21:23 -0600399
400void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
401
402#endif // VK_USE_PLATFORM_ANDROID_KHR
403
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -0600404std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
405 uint32_t set) {
406 std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
407 if (layout_data && (set < layout_data->set_layouts.size())) {
408 dsl = layout_data->set_layouts[set];
409 }
410 return dsl;
411}
412
Petr Kraus44f1c482020-04-25 20:09:25 +0200413void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
414 // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
415 // if format is AHB external format then the features are already set
416 if (image_state.has_ahb_format == false) {
417 const VkImageTiling image_tiling = image_state.createInfo.tiling;
418 const VkFormat image_format = image_state.createInfo.format;
419 if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
420 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
421 VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
422 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image, &drm_format_properties);
423
424 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
425 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
426 nullptr};
427 format_properties_2.pNext = (void *)&drm_properties_list;
428 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Lionel Landwerlin09351a72020-06-22 18:15:59 +0300429 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
430 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
431 drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
432 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Petr Kraus44f1c482020-04-25 20:09:25 +0200433
434 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300435 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
436 drm_format_properties.drmFormatModifier) {
437 image_state.format_features =
Petr Kraus44f1c482020-04-25 20:09:25 +0200438 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300439 break;
Petr Kraus44f1c482020-04-25 20:09:25 +0200440 }
441 }
442 } else {
443 VkFormatProperties format_properties;
444 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
445 image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
446 : format_properties.optimalTilingFeatures;
447 }
448 }
449}
450
// Create and register the tracked state for a successfully created VkImage:
// records disjoint/protected/swapchain/AHB properties, caches memory requirements
// (per-plane for disjoint multi-planar images), and resolves format features.
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    // Images created from a swapchain remember the owning swapchain handle.
    const auto swapchain_info = LvlFindInChain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            // Disjoint multi-planar image: each plane has its own memory requirements,
            // queried via VkImagePlaneMemoryRequirementsInfo chained into the info struct.
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least first plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    // Resolve format features (no-op for AHB external formats, already recorded above).
    AddImageStateProps(*is_node, device, physical_device);

    is_node->unprotected = ((pCreateInfo->flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);
    imageMap.emplace(*pImage, std::move(is_node));
}
500
// Tear down tracked state for an image being destroyed: invalidate command buffers
// that reference it, release memory bindings, detach from any swapchain, and remove
// it from the image map. Order matters here — bindings are cleared before the state
// object is marked destroyed and erased.
void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    // Any command buffer that recorded a use of this image is now invalid.
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto *mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image_state, mem_binding);
    }
    // If this image was bound to a swapchain, drop it from that swapchain's image list.
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}
522
523void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
524 VkImageLayout imageLayout, const VkClearColorValue *pColor,
525 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
526 auto cb_node = GetCBState(commandBuffer);
527 auto image_state = GetImageState(image);
528 if (cb_node && image_state) {
529 AddCommandBufferBindingImage(cb_node, image_state);
530 }
531}
532
533void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
534 VkImageLayout imageLayout,
535 const VkClearDepthStencilValue *pDepthStencil,
536 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
537 auto cb_node = GetCBState(commandBuffer);
538 auto image_state = GetImageState(image);
539 if (cb_node && image_state) {
540 AddCommandBufferBindingImage(cb_node, image_state);
541 }
542}
543
544void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
545 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
546 uint32_t regionCount, const VkImageCopy *pRegions) {
547 auto cb_node = GetCBState(commandBuffer);
548 auto src_image_state = GetImageState(srcImage);
549 auto dst_image_state = GetImageState(dstImage);
550
551 // Update bindings between images and cmd buffer
552 AddCommandBufferBindingImage(cb_node, src_image_state);
553 AddCommandBufferBindingImage(cb_node, dst_image_state);
554}
555
Jeff Leger178b1e52020-10-05 12:22:23 -0400556void ValidationStateTracker::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
557 const VkCopyImageInfo2KHR *pCopyImageInfo) {
558 auto cb_node = GetCBState(commandBuffer);
559 auto src_image_state = GetImageState(pCopyImageInfo->srcImage);
560 auto dst_image_state = GetImageState(pCopyImageInfo->dstImage);
561
562 // Update bindings between images and cmd buffer
563 AddCommandBufferBindingImage(cb_node, src_image_state);
564 AddCommandBufferBindingImage(cb_node, dst_image_state);
565}
566
locke-lunargd556cc32019-09-17 01:21:23 -0600567void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
568 VkImageLayout srcImageLayout, VkImage dstImage,
569 VkImageLayout dstImageLayout, uint32_t regionCount,
570 const VkImageResolve *pRegions) {
571 auto cb_node = GetCBState(commandBuffer);
572 auto src_image_state = GetImageState(srcImage);
573 auto dst_image_state = GetImageState(dstImage);
574
575 // Update bindings between images and cmd buffer
576 AddCommandBufferBindingImage(cb_node, src_image_state);
577 AddCommandBufferBindingImage(cb_node, dst_image_state);
578}
579
Jeff Leger178b1e52020-10-05 12:22:23 -0400580void ValidationStateTracker::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
581 const VkResolveImageInfo2KHR *pResolveImageInfo) {
582 auto cb_node = GetCBState(commandBuffer);
583 auto src_image_state = GetImageState(pResolveImageInfo->srcImage);
584 auto dst_image_state = GetImageState(pResolveImageInfo->dstImage);
585
586 // Update bindings between images and cmd buffer
587 AddCommandBufferBindingImage(cb_node, src_image_state);
588 AddCommandBufferBindingImage(cb_node, dst_image_state);
589}
590
locke-lunargd556cc32019-09-17 01:21:23 -0600591void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
592 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
593 uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
594 auto cb_node = GetCBState(commandBuffer);
595 auto src_image_state = GetImageState(srcImage);
596 auto dst_image_state = GetImageState(dstImage);
597
598 // Update bindings between images and cmd buffer
599 AddCommandBufferBindingImage(cb_node, src_image_state);
600 AddCommandBufferBindingImage(cb_node, dst_image_state);
601}
602
Jeff Leger178b1e52020-10-05 12:22:23 -0400603void ValidationStateTracker::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
604 const VkBlitImageInfo2KHR *pBlitImageInfo) {
605 auto cb_node = GetCBState(commandBuffer);
606 auto src_image_state = GetImageState(pBlitImageInfo->srcImage);
607 auto dst_image_state = GetImageState(pBlitImageInfo->dstImage);
608
609 // Update bindings between images and cmd buffer
610 AddCommandBufferBindingImage(cb_node, src_image_state);
611 AddCommandBufferBindingImage(cb_node, dst_image_state);
612}
613
locke-lunargd556cc32019-09-17 01:21:23 -0600614void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
615 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
616 VkResult result) {
617 if (result != VK_SUCCESS) return;
618 // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500619 auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -0600620
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700621 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
622 RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
623 }
locke-lunargd556cc32019-09-17 01:21:23 -0600624 // Get a set of requirements in the case the app does not
sfricke-samsungad90e722020-07-08 20:54:24 -0700625 DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);
locke-lunargd556cc32019-09-17 01:21:23 -0600626
sfricke-samsungedce77a2020-07-03 22:35:13 -0700627 buffer_state->unprotected = ((pCreateInfo->flags & VK_BUFFER_CREATE_PROTECTED_BIT) == 0);
628
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700629 bufferMap.emplace(*pBuffer, std::move(buffer_state));
locke-lunargd556cc32019-09-17 01:21:23 -0600630}
631
632void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
633 const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
634 VkResult result) {
635 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500636 auto buffer_state = GetBufferShared(pCreateInfo->buffer);
locke-lunarg25b6c352020-08-06 17:44:18 -0600637 auto buffer_view_state = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
638
639 VkFormatProperties format_properties;
640 DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);
641 buffer_view_state->format_features = format_properties.bufferFeatures;
642
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700643 bufferViewMap.emplace(*pView, std::move(buffer_view_state));
locke-lunargd556cc32019-09-17 01:21:23 -0600644}
645
646void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
647 const VkAllocationCallbacks *pAllocator, VkImageView *pView,
648 VkResult result) {
649 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500650 auto image_state = GetImageShared(pCreateInfo->image);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700651 auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
652
653 // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
654 const VkImageTiling image_tiling = image_state->createInfo.tiling;
655 const VkFormat image_view_format = pCreateInfo->format;
656 if (image_state->has_ahb_format == true) {
657 // The ImageView uses same Image's format feature since they share same AHB
658 image_view_state->format_features = image_state->format_features;
659 } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
660 // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
661 assert(device_extensions.vk_ext_image_drm_format_modifier);
662 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
663 nullptr};
664 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);
665
666 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
667 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
668 nullptr};
669 format_properties_2.pNext = (void *)&drm_properties_list;
nyorain38a9d232021-03-06 13:06:12 +0100670
671 // First call is to get the number of modifiers compatible with the queried format
672 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
673
674 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
675 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
676 drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
677
678 // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
679 // compatible with the queried format
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700680 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
681
682 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300683 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700684 image_view_state->format_features |=
685 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300686 break;
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700687 }
688 }
689 } else {
690 VkFormatProperties format_properties;
691 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
692 image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
693 : format_properties.optimalTilingFeatures;
694 }
695
sfricke-samsungd23f6e62021-01-17 09:05:47 -0800696 auto usage_create_info = LvlFindInChain<VkImageViewUsageCreateInfo>(pCreateInfo->pNext);
697 image_view_state->inherited_usage = (usage_create_info) ? usage_create_info->usage : image_state->createInfo.usage;
698
locke-lunarg9939d4b2020-10-26 20:11:08 -0600699 // filter_cubic_props is used in CmdDraw validation. But it takes a lot of performance if it does in CmdDraw.
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -0700700 image_view_state->filter_cubic_props = LvlInitStruct<VkFilterCubicImageViewImageFormatPropertiesEXT>();
locke-lunarg9939d4b2020-10-26 20:11:08 -0600701 if (IsExtEnabled(device_extensions.vk_ext_filter_cubic)) {
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -0700702 auto imageview_format_info = LvlInitStruct<VkPhysicalDeviceImageViewImageFormatInfoEXT>();
locke-lunarg9939d4b2020-10-26 20:11:08 -0600703 imageview_format_info.imageViewType = pCreateInfo->viewType;
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -0700704 auto image_format_info = LvlInitStruct<VkPhysicalDeviceImageFormatInfo2>(&imageview_format_info);
locke-lunarg9939d4b2020-10-26 20:11:08 -0600705 image_format_info.type = image_state->createInfo.imageType;
706 image_format_info.format = image_state->createInfo.format;
707 image_format_info.tiling = image_state->createInfo.tiling;
sfricke-samsungd23f6e62021-01-17 09:05:47 -0800708 image_format_info.usage = image_view_state->inherited_usage;
locke-lunarg9939d4b2020-10-26 20:11:08 -0600709 image_format_info.flags = image_state->createInfo.flags;
710
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -0700711 auto image_format_properties = LvlInitStruct<VkImageFormatProperties2>(&image_view_state->filter_cubic_props);
locke-lunarg9939d4b2020-10-26 20:11:08 -0600712
713 DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &image_format_info, &image_format_properties);
714 }
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700715 imageViewMap.emplace(*pView, std::move(image_view_state));
locke-lunargd556cc32019-09-17 01:21:23 -0600716}
717
718void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
719 uint32_t regionCount, const VkBufferCopy *pRegions) {
720 auto cb_node = GetCBState(commandBuffer);
721 auto src_buffer_state = GetBufferState(srcBuffer);
722 auto dst_buffer_state = GetBufferState(dstBuffer);
723
724 // Update bindings between buffers and cmd buffer
725 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
726 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
727}
728
Jeff Leger178b1e52020-10-05 12:22:23 -0400729void ValidationStateTracker::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
730 const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
731 auto cb_node = GetCBState(commandBuffer);
732 auto src_buffer_state = GetBufferState(pCopyBufferInfos->srcBuffer);
733 auto dst_buffer_state = GetBufferState(pCopyBufferInfos->dstBuffer);
734
735 // Update bindings between buffers and cmd buffer
736 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
737 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
738}
739
locke-lunargd556cc32019-09-17 01:21:23 -0600740void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
741 const VkAllocationCallbacks *pAllocator) {
742 IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
743 if (!image_view_state) return;
744 const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
745
746 // Any bound cmd buffers are now invalid
747 InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500748 image_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600749 imageViewMap.erase(imageView);
750}
751
752void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
753 if (!buffer) return;
754 auto buffer_state = GetBufferState(buffer);
755 const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
756
757 InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
locke-lunargd556cc32019-09-17 01:21:23 -0600758 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500759 buffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600760 bufferMap.erase(buffer_state->buffer);
761}
762
763void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
764 const VkAllocationCallbacks *pAllocator) {
765 if (!bufferView) return;
766 auto buffer_view_state = GetBufferViewState(bufferView);
767 const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
768
769 // Any bound cmd buffers are now invalid
770 InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500771 buffer_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600772 bufferViewMap.erase(bufferView);
773}
774
775void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
776 VkDeviceSize size, uint32_t data) {
777 auto cb_node = GetCBState(commandBuffer);
778 auto buffer_state = GetBufferState(dstBuffer);
779 // Update bindings between buffer and cmd buffer
780 AddCommandBufferBindingBuffer(cb_node, buffer_state);
781}
782
783void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
784 VkImageLayout srcImageLayout, VkBuffer dstBuffer,
785 uint32_t regionCount, const VkBufferImageCopy *pRegions) {
786 auto cb_node = GetCBState(commandBuffer);
787 auto src_image_state = GetImageState(srcImage);
788 auto dst_buffer_state = GetBufferState(dstBuffer);
789
790 // Update bindings between buffer/image and cmd buffer
791 AddCommandBufferBindingImage(cb_node, src_image_state);
792 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
793}
794
Jeff Leger178b1e52020-10-05 12:22:23 -0400795void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
796 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
797 auto cb_node = GetCBState(commandBuffer);
798 auto src_image_state = GetImageState(pCopyImageToBufferInfo->srcImage);
799 auto dst_buffer_state = GetBufferState(pCopyImageToBufferInfo->dstBuffer);
800
801 // Update bindings between buffer/image and cmd buffer
802 AddCommandBufferBindingImage(cb_node, src_image_state);
803 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
804}
805
locke-lunargd556cc32019-09-17 01:21:23 -0600806void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
807 VkImageLayout dstImageLayout, uint32_t regionCount,
808 const VkBufferImageCopy *pRegions) {
809 auto cb_node = GetCBState(commandBuffer);
810 auto src_buffer_state = GetBufferState(srcBuffer);
811 auto dst_image_state = GetImageState(dstImage);
812
813 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
814 AddCommandBufferBindingImage(cb_node, dst_image_state);
815}
816
Jeff Leger178b1e52020-10-05 12:22:23 -0400817void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
818 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
819 auto cb_node = GetCBState(commandBuffer);
820 auto src_buffer_state = GetBufferState(pCopyBufferToImageInfo->srcBuffer);
821 auto dst_image_state = GetImageState(pCopyBufferToImageInfo->dstImage);
822
823 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
824 AddCommandBufferBindingImage(cb_node, dst_image_state);
825}
826
locke-lunargd556cc32019-09-17 01:21:23 -0600827// Get the image viewstate for a given framebuffer attachment
locke-lunargfc78e932020-11-19 17:06:24 -0700828IMAGE_VIEW_STATE *ValidationStateTracker::GetActiveAttachmentImageViewState(const CMD_BUFFER_STATE *cb, uint32_t index,
829 const CMD_BUFFER_STATE *primary_cb) {
830 if (primary_cb) {
831 assert(primary_cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < primary_cb->active_attachments->size()));
832 return primary_cb->active_attachments->at(index);
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300833 }
locke-lunargfc78e932020-11-19 17:06:24 -0700834 assert(cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < cb->active_attachments->size()));
835 return cb->active_attachments->at(index);
locke-lunargd556cc32019-09-17 01:21:23 -0600836}
837
838// Get the image viewstate for a given framebuffer attachment
locke-lunargfc78e932020-11-19 17:06:24 -0700839const IMAGE_VIEW_STATE *ValidationStateTracker::GetActiveAttachmentImageViewState(const CMD_BUFFER_STATE *cb, uint32_t index,
840 const CMD_BUFFER_STATE *primary_cb) const {
841 if (primary_cb) {
842 assert(primary_cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < primary_cb->active_attachments->size()));
843 return primary_cb->active_attachments->at(index);
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300844 }
locke-lunargfc78e932020-11-19 17:06:24 -0700845 assert(cb->active_attachments && index != VK_ATTACHMENT_UNUSED && (index < cb->active_attachments->size()));
846 return cb->active_attachments->at(index);
locke-lunargd556cc32019-09-17 01:21:23 -0600847}
848
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700849void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state, layer_data::unordered_set<IMAGE_STATE *> *bound_images) {
John Zulaufd13b38e2021-03-05 08:17:38 -0700850 assert(bound_images);
851 for (auto *bound_image : *bound_images) {
852 if (bound_image && (bound_image != image_state) && bound_image->IsCompatibleAliasing(image_state)) {
853 auto inserted = bound_image->aliasing_images.emplace(image_state);
854 if (inserted.second) {
855 image_state->aliasing_images.emplace(bound_image);
locke-lunargd556cc32019-09-17 01:21:23 -0600856 }
857 }
858 }
859}
860
861void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
John Zulaufd13b38e2021-03-05 08:17:38 -0700862 for (auto *alias_state : image_state->aliasing_images) {
863 assert(alias_state);
864 alias_state->aliasing_images.erase(image_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600865 }
866 image_state->aliasing_images.clear();
867}
868
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700869void ValidationStateTracker::RemoveAliasingImages(const layer_data::unordered_set<IMAGE_STATE *> &bound_images) {
locke-lunargd556cc32019-09-17 01:21:23 -0600870 // This is one way clear. Because the bound_images include cross references, the one way clear loop could clear the whole
871 // reference. It doesn't need two ways clear.
John Zulaufd13b38e2021-03-05 08:17:38 -0700872 for (auto *bound_image : bound_images) {
873 if (bound_image) {
874 bound_image->aliasing_images.clear();
locke-lunargd556cc32019-09-17 01:21:23 -0600875 }
876 }
877}
878
locke-lunargd556cc32019-09-17 01:21:23 -0600879const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
880 auto it = queueMap.find(queue);
881 if (it == queueMap.cend()) {
882 return nullptr;
883 }
884 return &it->second;
885}
886
887QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
888 auto it = queueMap.find(queue);
889 if (it == queueMap.end()) {
890 return nullptr;
891 }
892 return &it->second;
893}
894
895const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
896 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
897 auto it = phys_dev_map->find(phys);
898 if (it == phys_dev_map->end()) {
899 return nullptr;
900 }
901 return &it->second;
902}
903
904PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
905 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
906 auto it = phys_dev_map->find(phys);
907 if (it == phys_dev_map->end()) {
908 return nullptr;
909 }
910 return &it->second;
911}
912
913PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
914const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
915
916// Return ptr to memory binding for given handle of specified type
917template <typename State, typename Result>
918static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
919 switch (typed_handle.type) {
920 case kVulkanObjectTypeImage:
921 return state->GetImageState(typed_handle.Cast<VkImage>());
922 case kVulkanObjectTypeBuffer:
923 return state->GetBufferState(typed_handle.Cast<VkBuffer>());
924 case kVulkanObjectTypeAccelerationStructureNV:
sourav parmarcd5fb182020-07-17 12:58:44 -0700925 return state->GetAccelerationStructureStateNV(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -0600926 default:
927 break;
928 }
929 return nullptr;
930}
931
932const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
933 return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
934}
935
936BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
937 return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
938}
939
940void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
941 assert(object != NULL);
942
John Zulauf79952712020-04-07 11:25:54 -0600943 auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
944 memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo, fake_address);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500945 auto mem_info = memObjMap[mem].get();
locke-lunargd556cc32019-09-17 01:21:23 -0600946
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700947 auto dedicated = LvlFindInChain<VkMemoryDedicatedAllocateInfo>(pAllocateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600948 if (dedicated) {
949 mem_info->is_dedicated = true;
950 mem_info->dedicated_buffer = dedicated->buffer;
951 mem_info->dedicated_image = dedicated->image;
952 }
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700953 auto export_info = LvlFindInChain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600954 if (export_info) {
955 mem_info->is_export = true;
956 mem_info->export_handle_type_flags = export_info->handleTypes;
957 }
sfricke-samsung23068272020-06-21 14:49:51 -0700958
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700959 auto alloc_flags = LvlFindInChain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
Mark Lobodzinski7bda6ed2020-07-24 09:57:38 -0600960 if (alloc_flags) {
961 auto dev_mask = alloc_flags->deviceMask;
962 if ((dev_mask != 0) && (dev_mask & (dev_mask - 1))) {
963 mem_info->multi_instance = true;
964 }
965 }
966 auto heap_index = phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].heapIndex;
Tony-LunarGb399fb02020-08-06 14:20:59 -0600967 mem_info->multi_instance |= (((phys_dev_mem_props.memoryHeaps[heap_index].flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT) != 0) &&
968 physical_device_count > 1);
Mark Lobodzinski7bda6ed2020-07-24 09:57:38 -0600969
sfricke-samsung23068272020-06-21 14:49:51 -0700970 // Assumes validation already for only a single import operation in the pNext
971#ifdef VK_USE_PLATFORM_WIN32_KHR
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700972 auto win32_import = LvlFindInChain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
sfricke-samsung23068272020-06-21 14:49:51 -0700973 if (win32_import) {
974 mem_info->is_import = true;
975 mem_info->import_handle_type_flags = win32_import->handleType;
976 }
977#endif
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700978 auto fd_import = LvlFindInChain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
sfricke-samsung23068272020-06-21 14:49:51 -0700979 if (fd_import) {
980 mem_info->is_import = true;
981 mem_info->import_handle_type_flags = fd_import->handleType;
982 }
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700983 auto host_pointer_import = LvlFindInChain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
sfricke-samsung23068272020-06-21 14:49:51 -0700984 if (host_pointer_import) {
985 mem_info->is_import = true;
986 mem_info->import_handle_type_flags = host_pointer_import->handleType;
987 }
988#ifdef VK_USE_PLATFORM_ANDROID_KHR
989 // AHB Import doesn't have handle in the pNext struct
990 // It should be assumed that all imported AHB can only have the same, single handleType
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700991 auto ahb_import = LvlFindInChain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
sfricke-samsung23068272020-06-21 14:49:51 -0700992 if ((ahb_import) && (ahb_import->buffer != nullptr)) {
993 mem_info->is_import_ahb = true;
994 mem_info->is_import = true;
995 mem_info->import_handle_type_flags = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
996 }
sfricke-samsung98acb882020-11-03 19:16:10 -0800997#endif // VK_USE_PLATFORM_ANDROID_KHR
sfricke-samsungedce77a2020-07-03 22:35:13 -0700998
999 const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
1000 mem_info->unprotected = ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) == 0);
locke-lunargd556cc32019-09-17 01:21:23 -06001001}
1002
1003// Create binding link between given sampler and command buffer node
1004void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001005 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001006 return;
1007 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001008 AddCommandBufferBinding(sampler_state->cb_bindings,
1009 VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -06001010}
1011
1012// Create binding link between given image node and command buffer node
1013void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001014 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001015 return;
1016 }
1017 // Skip validation if this image was created through WSI
1018 if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
1019 // First update cb binding for image
Jeff Bolzadbfa852019-10-04 13:53:30 -05001020 if (AddCommandBufferBinding(image_state->cb_bindings,
1021 VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001022 // Now update CB binding in MemObj mini CB list
John Zulauf79f06582021-02-27 18:38:39 -07001023 for (auto *mem_binding : image_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07001024 // Now update CBInfo's Mem reference list
1025 AddCommandBufferBinding(mem_binding->cb_bindings,
1026 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -06001027 }
1028 }
1029 }
1030}
1031
1032// Create binding link between given image view node and its image with command buffer node
1033void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001034 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001035 return;
1036 }
1037 // First add bindings for imageView
Jeff Bolzadbfa852019-10-04 13:53:30 -05001038 if (AddCommandBufferBinding(view_state->cb_bindings,
1039 VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001040 // Only need to continue if this is a new item
Jeff Bolzadbfa852019-10-04 13:53:30 -05001041 auto image_state = view_state->image_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06001042 // Add bindings for image within imageView
1043 if (image_state) {
1044 AddCommandBufferBindingImage(cb_node, image_state);
1045 }
1046 }
1047}
1048
1049// Create binding link between given buffer node and command buffer node
1050void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001051 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001052 return;
1053 }
1054 // First update cb binding for buffer
Jeff Bolzadbfa852019-10-04 13:53:30 -05001055 if (AddCommandBufferBinding(buffer_state->cb_bindings,
1056 VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001057 // Now update CB binding in MemObj mini CB list
John Zulauf79f06582021-02-27 18:38:39 -07001058 for (auto *mem_binding : buffer_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07001059 // Now update CBInfo's Mem reference list
1060 AddCommandBufferBinding(mem_binding->cb_bindings,
1061 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -06001062 }
1063 }
1064}
1065
1066// Create binding link between given buffer view node and its buffer with command buffer node
1067void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001068 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001069 return;
1070 }
1071 // First add bindings for bufferView
Jeff Bolzadbfa852019-10-04 13:53:30 -05001072 if (AddCommandBufferBinding(view_state->cb_bindings,
1073 VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
1074 auto buffer_state = view_state->buffer_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06001075 // Add bindings for buffer within bufferView
1076 if (buffer_state) {
1077 AddCommandBufferBindingBuffer(cb_node, buffer_state);
1078 }
1079 }
1080}
1081
1082// Create binding link between given acceleration structure and command buffer node
1083void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
1084 ACCELERATION_STRUCTURE_STATE *as_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001085 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001086 return;
1087 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001088 if (AddCommandBufferBinding(
1089 as_state->cb_bindings,
1090 VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001091 // Now update CB binding in MemObj mini CB list
John Zulauf79f06582021-02-27 18:38:39 -07001092 for (auto *mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07001093 // Now update CBInfo's Mem reference list
1094 AddCommandBufferBinding(mem_binding->cb_bindings,
1095 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -06001096 }
1097 }
1098}
1099
sourav parmarcd5fb182020-07-17 12:58:44 -07001100// Create binding link between given acceleration structure and command buffer node
1101void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
1102 ACCELERATION_STRUCTURE_STATE_KHR *as_state) {
1103 if (disabled[command_buffer_state]) {
1104 return;
1105 }
1106 if (AddCommandBufferBinding(
1107 as_state->cb_bindings,
1108 VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureKHR, as_state), cb_node)) {
1109 // Now update CB binding in MemObj mini CB list
John Zulauf79f06582021-02-27 18:38:39 -07001110 for (auto *mem_binding : as_state->GetBoundMemory()) {
sourav parmarcd5fb182020-07-17 12:58:44 -07001111 // Now update CBInfo's Mem reference list
1112 AddCommandBufferBinding(mem_binding->cb_bindings,
1113 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
1114 }
1115 }
1116}
1117
locke-lunargd556cc32019-09-17 01:21:23 -06001118// Clear a single object binding from given memory object
locke-lunarg5f59e782019-12-19 10:32:23 -07001119void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
locke-lunargd556cc32019-09-17 01:21:23 -06001120 // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
1121 if (mem_info) {
1122 mem_info->obj_bindings.erase(typed_handle);
1123 }
1124}
1125
1126// ClearMemoryObjectBindings clears the binding of objects to memory
1127// For the given object it pulls the memory bindings and makes sure that the bindings
1128// no longer refer to the object being cleared. This occurs when objects are destroyed.
1129void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
1130 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
1131 if (mem_binding) {
1132 if (!mem_binding->sparse) {
locke-lunarg5f59e782019-12-19 10:32:23 -07001133 ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06001134 } else { // Sparse, clear all bindings
1135 for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
locke-lunarg5f59e782019-12-19 10:32:23 -07001136 ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06001137 }
1138 }
1139 }
1140}
1141
// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
// mem           - memory being bound (VK_NULL_HANDLE is a no-op here)
// mem_binding   - state object (IMAGE_STATE/BUFFER_STATE/etc.) receiving the binding; must not be null
// memory_offset - offset of the object within the memory allocation
// typed_handle  - handle+type of the object, recorded in the memory object's reverse-binding set
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        // Look up shared memory state; may be null if the handle is unknown/already destroyed
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            // Non-sparse bindings always span the object's full memory requirements
            mem_binding->binding.size = mem_binding->requirements.size;
            // Record the reverse link: memory object -> bound resource handle
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                // NOTE(review): downcast relies on typed_handle.type matching the dynamic type of mem_binding
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}
1169
1170// For NULL mem case, clear any previous binding Else...
1171// Make sure given object is in its object map
1172// IF a previous binding existed, update binding
1173// Add reference from objectInfo to memoryInfo
1174// Add reference off of object's binding info
1175// Return VK_TRUE if addition is successful, VK_FALSE otherwise
locke-lunargcf04d582019-11-26 00:31:50 -07001176bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
1177 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
locke-lunargd556cc32019-09-17 01:21:23 -06001178 bool skip = VK_FALSE;
1179 // Handle NULL case separately, just clear previous binding & decrement reference
locke-lunargcf04d582019-11-26 00:31:50 -07001180 if (mem == VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06001181 // TODO : This should cause the range of the resource to be unbound according to spec
1182 } else {
1183 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
1184 assert(mem_binding);
1185 if (mem_binding) { // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
1186 assert(mem_binding->sparse);
locke-lunargcf04d582019-11-26 00:31:50 -07001187 MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
1188 if (binding.mem_state) {
1189 binding.mem_state->obj_bindings.insert(typed_handle);
locke-lunargd556cc32019-09-17 01:21:23 -06001190 // Need to set mem binding for this object
1191 mem_binding->sparse_bindings.insert(binding);
1192 mem_binding->UpdateBoundMemorySet();
1193 }
1194 }
1195 }
1196 return skip;
1197}
1198
locke-lunarg540b2252020-08-03 13:23:36 -06001199void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
1200 const char *function) {
locke-lunargb8d7a7a2020-10-25 16:01:52 -06001201 const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
1202 auto &state = cb_state->lastBound[lv_bind_point];
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001203 PIPELINE_STATE *pipe = state.pipeline_state;
locke-lunargd556cc32019-09-17 01:21:23 -06001204 if (VK_NULL_HANDLE != state.pipeline_layout) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001205 for (const auto &set_binding_pair : pipe->active_slots) {
1206 uint32_t set_index = set_binding_pair.first;
locke-lunargd556cc32019-09-17 01:21:23 -06001207 // Pull the set node
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001208 cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[set_index].bound_descriptor_set;
locke-lunargd556cc32019-09-17 01:21:23 -06001209
Tony-LunarG77822802020-05-28 16:35:46 -06001210 // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding
locke-lunargd556cc32019-09-17 01:21:23 -06001211
Tony-LunarG77822802020-05-28 16:35:46 -06001212 // TODO: If recreating the reduced_map here shows up in profilinging, need to find a way of sharing with the
1213 // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
1214 cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001215 const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pipe);
Tony-LunarG77822802020-05-28 16:35:46 -06001216
1217 if (reduced_map.IsManyDescriptors()) {
1218 // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001219 descriptor_set->UpdateValidationCache(*cb_state, *pipe, binding_req_map);
Tony-LunarG77822802020-05-28 16:35:46 -06001220 }
1221
1222 // We can skip updating the state if "nothing" has changed since the last validation.
1223 // See CoreChecks::ValidateCmdBufDrawState for more details.
1224 bool descriptor_set_changed =
1225 !reduced_map.IsManyDescriptors() ||
1226 // Update if descriptor set (or contents) has changed
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001227 state.per_set[set_index].validated_set != descriptor_set ||
1228 state.per_set[set_index].validated_set_change_count != descriptor_set->GetChangeCount() ||
Tony-LunarG77822802020-05-28 16:35:46 -06001229 (!disabled[image_layout_validation] &&
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001230 state.per_set[set_index].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
Tony-LunarG77822802020-05-28 16:35:46 -06001231 bool need_update = descriptor_set_changed ||
1232 // Update if previous bindingReqMap doesn't include new bindingReqMap
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001233 !std::includes(state.per_set[set_index].validated_set_binding_req_map.begin(),
1234 state.per_set[set_index].validated_set_binding_req_map.end(), binding_req_map.begin(),
Tony-LunarG77822802020-05-28 16:35:46 -06001235 binding_req_map.end());
1236
1237 if (need_update) {
1238 // Bind this set and its active descriptor resources to the command buffer
1239 if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
1240 // Only record the bindings that haven't already been recorded
1241 BindingReqMap delta_reqs;
1242 std::set_difference(binding_req_map.begin(), binding_req_map.end(),
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001243 state.per_set[set_index].validated_set_binding_req_map.begin(),
1244 state.per_set[set_index].validated_set_binding_req_map.end(),
Jeremy Gebbencbf22862021-03-03 12:01:22 -07001245 layer_data::insert_iterator<BindingReqMap>(delta_reqs, delta_reqs.begin()));
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001246 descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, delta_reqs, function);
Tony-LunarG77822802020-05-28 16:35:46 -06001247 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001248 descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, binding_req_map, function);
locke-lunargd556cc32019-09-17 01:21:23 -06001249 }
1250
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001251 state.per_set[set_index].validated_set = descriptor_set;
1252 state.per_set[set_index].validated_set_change_count = descriptor_set->GetChangeCount();
1253 state.per_set[set_index].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
Tony-LunarG77822802020-05-28 16:35:46 -06001254 if (reduced_map.IsManyDescriptors()) {
1255 // Check whether old == new before assigning, the equality check is much cheaper than
1256 // freeing and reallocating the map.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001257 if (state.per_set[set_index].validated_set_binding_req_map != set_binding_pair.second) {
1258 state.per_set[set_index].validated_set_binding_req_map = set_binding_pair.second;
Jeff Bolz56308942019-10-06 22:05:23 -05001259 }
Tony-LunarG77822802020-05-28 16:35:46 -06001260 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001261 state.per_set[set_index].validated_set_binding_req_map = BindingReqMap();
locke-lunargd556cc32019-09-17 01:21:23 -06001262 }
1263 }
1264 }
1265 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001266 if (!pipe->vertex_binding_descriptions_.empty()) {
locke-lunargd556cc32019-09-17 01:21:23 -06001267 cb_state->vertex_buffer_used = true;
1268 }
1269}
1270
1271// Remove set from setMap and delete the set
1272void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001273 descriptor_set->destroyed = true;
Jeff Bolzadbfa852019-10-04 13:53:30 -05001274 const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001275 // Any bound cmd buffers are now invalid
Jeff Bolzadbfa852019-10-04 13:53:30 -05001276 InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001277
locke-lunargd556cc32019-09-17 01:21:23 -06001278 setMap.erase(descriptor_set->GetSet());
1279}
1280
1281// Free all DS Pools including their Sets & related sub-structs
1282// NOTE : Calls to this function should be wrapped in mutex
1283void ValidationStateTracker::DeleteDescriptorSetPools() {
1284 for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
1285 // Remove this pools' sets from setMap and delete them
John Zulauf79f06582021-02-27 18:38:39 -07001286 for (auto *ds : ii->second->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06001287 FreeDescriptorSet(ds);
1288 }
1289 ii->second->sets.clear();
1290 ii = descriptorPoolMap.erase(ii);
1291 }
1292}
1293
// For given object struct return a ptr of BASE_NODE type for its wrapping struct
// Fast path: if the handle already carries a cached node pointer, return it directly;
// otherwise dispatch on the handle type to the matching state-map lookup.
// Returns nullptr for kVulkanObjectTypeUnknown (a zeroed-out binding slot) or when the
// handle is not found in its map.
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    // Slow path: per-type lookup in the corresponding state map
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureStateNV(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureKHR: {
            base_ptr = GetAccelerationStructureStateKHR(object_struct.Cast<VkAccelerationStructureKHR>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}
1382
sfricke-samsungbf1a2ed2020-06-14 23:31:00 -07001383// Gets union of all features defined by Potential Format Features
1384// except, does not handle the external format case for AHB as that only can be used for sampled images
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001385VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
1386 VkFormatFeatureFlags format_features = 0;
1387
1388 if (format != VK_FORMAT_UNDEFINED) {
1389 VkFormatProperties format_properties;
1390 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
1391 format_features |= format_properties.linearTilingFeatures;
1392 format_features |= format_properties.optimalTilingFeatures;
1393 if (device_extensions.vk_ext_image_drm_format_modifier) {
1394 // VK_KHR_get_physical_device_properties2 is required in this case
1395 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
1396 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
1397 nullptr};
1398 format_properties_2.pNext = (void *)&drm_properties_list;
Marc Alcala Prieto773871c2021-02-04 19:24:43 +01001399
1400 // First call is to get the number of modifiers compatible with the queried format
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001401 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
Marc Alcala Prieto773871c2021-02-04 19:24:43 +01001402
1403 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
1404 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
1405 drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
1406
1407 // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
1408 // compatible with the queried format
1409 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
1410
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001411 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
1412 format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
1413 }
1414 }
1415 }
1416
1417 return format_features;
1418}
1419
locke-lunargd556cc32019-09-17 01:21:23 -06001420// Tie the VulkanTypedHandle to the cmd buffer which includes:
1421// Add object_binding to cmd buffer
1422// Add cb_binding to object
Jeremy Gebbencbf22862021-03-03 12:01:22 -07001423bool ValidationStateTracker::AddCommandBufferBinding(BASE_NODE::BindingsType &cb_bindings,
locke-lunargd556cc32019-09-17 01:21:23 -06001424 const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001425 if (disabled[command_buffer_state]) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001426 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001427 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001428 // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
1429 // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
Jeremy Gebbenfc6f8152021-03-18 16:58:55 -06001430 auto inserted = cb_bindings.emplace(cb_node, -1);
Jeff Bolzadbfa852019-10-04 13:53:30 -05001431 if (inserted.second) {
1432 cb_node->object_bindings.push_back(obj);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001433 inserted.first->second = static_cast<int>(cb_node->object_bindings.size()) - 1;
Jeff Bolzadbfa852019-10-04 13:53:30 -05001434 return true;
1435 }
1436 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001437}
1438
1439// For a given object, if cb_node is in that objects cb_bindings, remove cb_node
1440void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
1441 BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
1442 if (base_obj) base_obj->cb_bindings.erase(cb_node);
1443}
1444
// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
// Mirrors the effect of vkResetCommandBuffer / re-begin on the tracked state; also fires
// the layer's command_buffer_reset_callback (even when the CB state object is unknown).
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *cb_state = GetCBState(cb);
    if (cb_state) {
        cb_state->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        cb_state->commandBuffer = cb;
        memset(&cb_state->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&cb_state->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        cb_state->hasDrawCmd = false;
        cb_state->hasTraceRaysCmd = false;
        cb_state->hasBuildAccelerationStructureCmd = false;
        cb_state->hasDispatchCmd = false;
        cb_state->state = CB_NEW;
        cb_state->commandCount = 0;
        cb_state->submitCount = 0;
        cb_state->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        cb_state->status = 0;
        cb_state->static_status = 0;
        // Dynamic viewport/scissor inheritance tracking
        cb_state->inheritedViewportDepths.clear();
        cb_state->usedViewportScissorCount = 0;
        cb_state->pipelineStaticViewportCount = 0;
        cb_state->pipelineStaticScissorCount = 0;
        cb_state->viewportMask = 0;
        cb_state->viewportWithCountMask = 0;
        cb_state->viewportWithCountCount = 0;
        cb_state->scissorMask = 0;
        cb_state->scissorWithCountMask = 0;
        cb_state->scissorWithCountCount = 0;
        cb_state->trashedViewportMask = 0;
        cb_state->trashedScissorMask = 0;
        cb_state->trashedViewportCount = false;
        cb_state->trashedScissorCount = false;
        cb_state->usedDynamicViewportCount = false;
        cb_state->usedDynamicScissorCount = false;
        cb_state->primitiveTopology = VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;

        // Clear per-bind-point pipeline/descriptor state
        for (auto &item : cb_state->lastBound) {
            item.reset();
        }

        // Render pass / attachment state
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        cb_state->activeRenderPass = nullptr;
        cb_state->active_attachments = nullptr;
        cb_state->active_subpasses = nullptr;
        cb_state->attachments_view_states.clear();
        cb_state->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        cb_state->activeSubpass = 0;
        cb_state->broken_bindings.clear();
        cb_state->waitedEvents.clear();
        cb_state->events.clear();
        cb_state->writeEventsBeforeWait.clear();
        cb_state->activeQueries.clear();
        cb_state->startedQueries.clear();
        cb_state->image_layout_map.clear();
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        cb_state->vertex_buffer_used = false;
        cb_state->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_state->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto *sub_cb : cb_state->linkedCommandBuffers) {
            sub_cb->linkedCommandBuffers.erase(cb_state);
        }
        cb_state->linkedCommandBuffers.clear();
        cb_state->queue_submit_functions.clear();
        cb_state->cmd_execute_commands_functions.clear();
        cb_state->eventUpdates.clear();
        cb_state->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : cb_state->object_bindings) {
            RemoveCommandBufferBinding(obj, cb_state);
        }
        cb_state->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto &framebuffer : cb_state->framebuffers) {
            framebuffer->cb_bindings.erase(cb_state);
        }
        cb_state->framebuffers.clear();
        cb_state->activeFramebuffer = VK_NULL_HANDLE;
        cb_state->index_buffer_binding.reset();

        // Pending queue-family-ownership-transfer barrier tracking
        cb_state->qfo_transfer_image_barriers.Reset();
        cb_state->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
        cb_state->debug_label.Reset();
        cb_state->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        cb_state->small_indexed_draw_call_count = 0;

        cb_state->transform_feedback_active = false;
    }
    // Notify any registered listener (e.g. GPU-AV) of the reset, even without tracked state
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}
1549
1550void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
1551 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
1552 VkResult result) {
1553 if (VK_SUCCESS != result) return;
1554
Locke Linf3873542021-04-26 11:25:10 -06001555 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
1556 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
1557 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
1558
locke-lunargd556cc32019-09-17 01:21:23 -06001559 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
1560 if (nullptr == enabled_features_found) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001561 const auto *features2 = LvlFindInChain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001562 if (features2) {
1563 enabled_features_found = &(features2->features);
Locke Linf3873542021-04-26 11:25:10 -06001564
1565 const auto *provoking_vertex_features = lvl_find_in_chain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(features2->pNext);
1566 if (provoking_vertex_features) {
1567 state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
1568 }
locke-lunargd556cc32019-09-17 01:21:23 -06001569 }
1570 }
1571
locke-lunargd556cc32019-09-17 01:21:23 -06001572 if (nullptr == enabled_features_found) {
1573 state_tracker->enabled_features.core = {};
1574 } else {
1575 state_tracker->enabled_features.core = *enabled_features_found;
1576 }
1577
1578 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
1579 // previously set them through an explicit API call.
1580 uint32_t count;
1581 auto pd_state = GetPhysicalDeviceState(gpu);
1582 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
1583 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
1584 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
1585 // Save local link to this device's physical device state
1586 state_tracker->physical_device_state = pd_state;
1587
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001588 const auto *vulkan_12_features = LvlFindInChain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001589 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001590 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001591 } else {
sfricke-samsung27c70722020-05-02 08:42:39 -07001592 // Set Extension Feature Aliases to false as there is no struct to check
1593 state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
1594 state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
1595 state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
1596 state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
1597 state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
1598 state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001599 state_tracker->enabled_features.core12.subgroupBroadcastDynamicId = VK_FALSE;
sfricke-samsung27c70722020-05-02 08:42:39 -07001600
1601 // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001602
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001603 const auto *eight_bit_storage_features = LvlFindInChain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001604 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001605 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1606 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1607 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1608 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001609 }
1610
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001611 const auto *float16_int8_features = LvlFindInChain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001612 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001613 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1614 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001615 }
1616
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001617 const auto *descriptor_indexing_features = LvlFindInChain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001618 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001619 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1620 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1621 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1622 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1623 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1624 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1625 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1626 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1627 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1628 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1629 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1630 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1631 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1632 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1633 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1634 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1635 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1636 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1637 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1638 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1639 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1640 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1641 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1642 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1643 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1644 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1645 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1646 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
1647 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
1648 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1649 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1650 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1651 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1652 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1653 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1654 descriptor_indexing_features->descriptorBindingPartiallyBound;
1655 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1656 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1657 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001658 }
1659
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001660 const auto *scalar_block_layout_features = LvlFindInChain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001661 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001662 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001663 }
1664
1665 const auto *imageless_framebuffer_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001666 LvlFindInChain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001667 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001668 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001669 }
1670
1671 const auto *uniform_buffer_standard_layout_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001672 LvlFindInChain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001673 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001674 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1675 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001676 }
1677
1678 const auto *subgroup_extended_types_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001679 LvlFindInChain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001680 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001681 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1682 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001683 }
1684
1685 const auto *separate_depth_stencil_layouts_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001686 LvlFindInChain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001687 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001688 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1689 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001690 }
1691
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001692 const auto *host_query_reset_features = LvlFindInChain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001693 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001694 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001695 }
1696
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001697 const auto *timeline_semaphore_features = LvlFindInChain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001698 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001699 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001700 }
1701
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001702 const auto *buffer_device_address = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001703 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001704 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1705 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1706 buffer_device_address->bufferDeviceAddressCaptureReplay;
1707 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1708 buffer_device_address->bufferDeviceAddressMultiDevice;
1709 }
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001710
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001711 const auto *atomic_int64_features = LvlFindInChain<VkPhysicalDeviceShaderAtomicInt64Features>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001712 if (atomic_int64_features) {
1713 state_tracker->enabled_features.core12.shaderBufferInt64Atomics = atomic_int64_features->shaderBufferInt64Atomics;
1714 state_tracker->enabled_features.core12.shaderSharedInt64Atomics = atomic_int64_features->shaderSharedInt64Atomics;
1715 }
1716
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001717 const auto *memory_model_features = LvlFindInChain<VkPhysicalDeviceVulkanMemoryModelFeatures>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001718 if (memory_model_features) {
1719 state_tracker->enabled_features.core12.vulkanMemoryModel = memory_model_features->vulkanMemoryModel;
1720 state_tracker->enabled_features.core12.vulkanMemoryModelDeviceScope =
1721 memory_model_features->vulkanMemoryModelDeviceScope;
1722 state_tracker->enabled_features.core12.vulkanMemoryModelAvailabilityVisibilityChains =
1723 memory_model_features->vulkanMemoryModelAvailabilityVisibilityChains;
1724 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001725 }
1726
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001727 const auto *vulkan_11_features = LvlFindInChain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001728 if (vulkan_11_features) {
1729 state_tracker->enabled_features.core11 = *vulkan_11_features;
1730 } else {
1731 // These structs are only allowed in pNext chain if there is no kPhysicalDeviceVulkan11Features
1732
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001733 const auto *sixteen_bit_storage_features = LvlFindInChain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001734 if (sixteen_bit_storage_features) {
1735 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1736 sixteen_bit_storage_features->storageBuffer16BitAccess;
1737 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1738 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1739 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1740 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1741 }
1742
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001743 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001744 if (multiview_features) {
1745 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1746 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1747 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1748 }
1749
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001750 const auto *variable_pointers_features = LvlFindInChain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001751 if (variable_pointers_features) {
1752 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1753 variable_pointers_features->variablePointersStorageBuffer;
1754 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1755 }
1756
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001757 const auto *protected_memory_features = LvlFindInChain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001758 if (protected_memory_features) {
1759 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1760 }
1761
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001762 const auto *ycbcr_conversion_features = LvlFindInChain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001763 if (ycbcr_conversion_features) {
1764 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1765 }
1766
1767 const auto *shader_draw_parameters_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001768 LvlFindInChain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001769 if (shader_draw_parameters_features) {
1770 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001771 }
1772 }
1773
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001774 const auto *device_group_ci = LvlFindInChain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001775 if (device_group_ci) {
1776 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1777 state_tracker->device_group_create_info = *device_group_ci;
1778 } else {
1779 state_tracker->physical_device_count = 1;
1780 }
locke-lunargd556cc32019-09-17 01:21:23 -06001781
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001782 const auto *exclusive_scissor_features = LvlFindInChain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001783 if (exclusive_scissor_features) {
1784 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1785 }
1786
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001787 const auto *shading_rate_image_features = LvlFindInChain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001788 if (shading_rate_image_features) {
1789 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1790 }
1791
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001792 const auto *mesh_shader_features = LvlFindInChain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001793 if (mesh_shader_features) {
1794 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1795 }
1796
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001797 const auto *inline_uniform_block_features = LvlFindInChain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001798 if (inline_uniform_block_features) {
1799 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1800 }
1801
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001802 const auto *transform_feedback_features = LvlFindInChain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001803 if (transform_feedback_features) {
1804 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1805 }
1806
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001807 const auto *vtx_attrib_div_features = LvlFindInChain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001808 if (vtx_attrib_div_features) {
1809 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1810 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001811
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001812 const auto *buffer_device_address_ext = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001813 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001814 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001815 }
1816
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001817 const auto *cooperative_matrix_features = LvlFindInChain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001818 if (cooperative_matrix_features) {
1819 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1820 }
1821
locke-lunargd556cc32019-09-17 01:21:23 -06001822 const auto *compute_shader_derivatives_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001823 LvlFindInChain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001824 if (compute_shader_derivatives_features) {
1825 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1826 }
1827
1828 const auto *fragment_shader_barycentric_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001829 LvlFindInChain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001830 if (fragment_shader_barycentric_features) {
1831 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1832 }
1833
1834 const auto *shader_image_footprint_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001835 LvlFindInChain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001836 if (shader_image_footprint_features) {
1837 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1838 }
1839
1840 const auto *fragment_shader_interlock_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001841 LvlFindInChain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001842 if (fragment_shader_interlock_features) {
1843 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1844 }
1845
1846 const auto *demote_to_helper_invocation_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001847 LvlFindInChain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001848 if (demote_to_helper_invocation_features) {
1849 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1850 }
1851
1852 const auto *texel_buffer_alignment_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001853 LvlFindInChain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001854 if (texel_buffer_alignment_features) {
1855 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1856 }
1857
locke-lunargd556cc32019-09-17 01:21:23 -06001858 const auto *pipeline_exe_props_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001859 LvlFindInChain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001860 if (pipeline_exe_props_features) {
1861 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1862 }
1863
Jeff Bolz82f854d2019-09-17 14:56:47 -05001864 const auto *dedicated_allocation_image_aliasing_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001865 LvlFindInChain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
Jeff Bolz82f854d2019-09-17 14:56:47 -05001866 if (dedicated_allocation_image_aliasing_features) {
1867 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1868 *dedicated_allocation_image_aliasing_features;
1869 }
1870
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001871 const auto *performance_query_features = LvlFindInChain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001872 if (performance_query_features) {
1873 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1874 }
1875
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001876 const auto *device_coherent_memory_features = LvlFindInChain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
Tobias Hector782bcde2019-11-28 16:19:42 +00001877 if (device_coherent_memory_features) {
1878 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1879 }
1880
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001881 const auto *ycbcr_image_array_features = LvlFindInChain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungcead0802020-01-30 22:20:10 -08001882 if (ycbcr_image_array_features) {
1883 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1884 }
1885
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001886 const auto *ray_query_features = LvlFindInChain<VkPhysicalDeviceRayQueryFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001887 if (ray_query_features) {
1888 state_tracker->enabled_features.ray_query_features = *ray_query_features;
1889 }
1890
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001891 const auto *ray_tracing_pipeline_features = LvlFindInChain<VkPhysicalDeviceRayTracingPipelineFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001892 if (ray_tracing_pipeline_features) {
1893 state_tracker->enabled_features.ray_tracing_pipeline_features = *ray_tracing_pipeline_features;
1894 }
1895
1896 const auto *ray_tracing_acceleration_structure_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001897 LvlFindInChain<VkPhysicalDeviceAccelerationStructureFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001898 if (ray_tracing_acceleration_structure_features) {
1899 state_tracker->enabled_features.ray_tracing_acceleration_structure_features = *ray_tracing_acceleration_structure_features;
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001900 }
1901
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001902 const auto *robustness2_features = LvlFindInChain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz165818a2020-05-08 11:19:03 -05001903 if (robustness2_features) {
1904 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1905 }
1906
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001907 const auto *fragment_density_map_features = LvlFindInChain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001908 if (fragment_density_map_features) {
1909 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1910 }
1911
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001912 const auto *fragment_density_map_features2 = LvlFindInChain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001913 if (fragment_density_map_features2) {
1914 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1915 }
1916
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001917 const auto *astc_decode_features = LvlFindInChain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001918 if (astc_decode_features) {
1919 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1920 }
1921
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001922 const auto *custom_border_color_features = LvlFindInChain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
Tony-LunarG7337b312020-04-15 16:40:25 -06001923 if (custom_border_color_features) {
1924 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1925 }
1926
sfricke-samsungfd661d62020-05-16 00:57:27 -07001927 const auto *pipeline_creation_cache_control_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001928 LvlFindInChain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungfd661d62020-05-16 00:57:27 -07001929 if (pipeline_creation_cache_control_features) {
1930 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1931 }
1932
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001933 const auto *fragment_shading_rate_features = LvlFindInChain<VkPhysicalDeviceFragmentShadingRateFeaturesKHR>(pCreateInfo->pNext);
Tobias Hector6663c9b2020-11-05 10:18:02 +00001934 if (fragment_shading_rate_features) {
1935 state_tracker->enabled_features.fragment_shading_rate_features = *fragment_shading_rate_features;
1936 }
1937
Piers Daniell39842ee2020-07-10 16:42:33 -06001938 const auto *extended_dynamic_state_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001939 LvlFindInChain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
Piers Daniell39842ee2020-07-10 16:42:33 -06001940 if (extended_dynamic_state_features) {
1941 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1942 }
1943
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -07001944 const auto *extended_dynamic_state2_features =
1945 LvlFindInChain<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT>(pCreateInfo->pNext);
1946 if (extended_dynamic_state2_features) {
1947 state_tracker->enabled_features.extended_dynamic_state2_features = *extended_dynamic_state2_features;
1948 }
1949
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001950 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001951 if (multiview_features) {
1952 state_tracker->enabled_features.multiview_features = *multiview_features;
1953 }
1954
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001955 const auto *portability_features = LvlFindInChain<VkPhysicalDevicePortabilitySubsetFeaturesKHR>(pCreateInfo->pNext);
Nathaniel Cesariob3f2d702020-11-09 09:20:49 -07001956 if (portability_features) {
1957 state_tracker->enabled_features.portability_subset_features = *portability_features;
1958 }
1959
sfricke-samsung0065ce02020-12-03 22:46:37 -08001960 const auto *shader_integer_functions2_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001961 LvlFindInChain<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001962 if (shader_integer_functions2_features) {
1963 state_tracker->enabled_features.shader_integer_functions2_features = *shader_integer_functions2_features;
1964 }
1965
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001966 const auto *shader_sm_builtins_feature = LvlFindInChain<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001967 if (shader_sm_builtins_feature) {
1968 state_tracker->enabled_features.shader_sm_builtins_feature = *shader_sm_builtins_feature;
1969 }
1970
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001971 const auto *shader_atomic_float_feature = LvlFindInChain<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001972 if (shader_atomic_float_feature) {
1973 state_tracker->enabled_features.shader_atomic_float_feature = *shader_atomic_float_feature;
1974 }
1975
1976 const auto *shader_image_atomic_int64_feature =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001977 LvlFindInChain<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001978 if (shader_image_atomic_int64_feature) {
1979 state_tracker->enabled_features.shader_image_atomic_int64_feature = *shader_image_atomic_int64_feature;
1980 }
1981
sfricke-samsung486a51e2021-01-02 00:10:15 -08001982 const auto *shader_clock_feature = LvlFindInChain<VkPhysicalDeviceShaderClockFeaturesKHR>(pCreateInfo->pNext);
1983 if (shader_clock_feature) {
1984 state_tracker->enabled_features.shader_clock_feature = *shader_clock_feature;
1985 }
1986
Jeremy Gebben5f585ae2021-02-02 09:03:06 -07001987 const auto *conditional_rendering_features =
1988 LvlFindInChain<VkPhysicalDeviceConditionalRenderingFeaturesEXT>(pCreateInfo->pNext);
1989 if (conditional_rendering_features) {
1990 state_tracker->enabled_features.conditional_rendering = *conditional_rendering_features;
1991 }
1992
Shannon McPhersondb287d42021-02-02 15:27:32 -07001993 const auto *workgroup_memory_explicit_layout_features =
1994 LvlFindInChain<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>(pCreateInfo->pNext);
1995 if (workgroup_memory_explicit_layout_features) {
1996 state_tracker->enabled_features.workgroup_memory_explicit_layout_features = *workgroup_memory_explicit_layout_features;
1997 }
1998
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001999 const auto *synchronization2_features =
2000 LvlFindInChain<VkPhysicalDeviceSynchronization2FeaturesKHR>(pCreateInfo->pNext);
2001 if (synchronization2_features) {
2002 state_tracker->enabled_features.synchronization2_features = *synchronization2_features;
2003 }
2004
Locke Linf3873542021-04-26 11:25:10 -06002005 const auto *provoking_vertex_features = lvl_find_in_chain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(pCreateInfo->pNext);
2006 if (provoking_vertex_features) {
2007 state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
2008 }
2009
Piers Daniellcb6d8032021-04-19 18:51:26 -06002010 const auto *vertex_input_dynamic_state_features =
2011 LvlFindInChain<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT>(pCreateInfo->pNext);
2012 if (vertex_input_dynamic_state_features) {
2013 state_tracker->enabled_features.vertex_input_dynamic_state_features = *vertex_input_dynamic_state_features;
2014 }
2015
David Zhao Akeley44139b12021-04-26 16:16:13 -07002016 const auto *inherited_viewport_scissor_features =
2017 LvlFindInChain<VkPhysicalDeviceInheritedViewportScissorFeaturesNV>(pCreateInfo->pNext);
2018 if (inherited_viewport_scissor_features) {
2019 state_tracker->enabled_features.inherited_viewport_scissor_features = *inherited_viewport_scissor_features;
2020 }
2021
locke-lunargd556cc32019-09-17 01:21:23 -06002022 // Store physical device properties and physical device mem limits into CoreChecks structs
2023 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
2024 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002025 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
2026 &state_tracker->phys_dev_props_core11);
2027 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
2028 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06002029
2030 const auto &dev_ext = state_tracker->device_extensions;
2031 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
2032
2033 if (dev_ext.vk_khr_push_descriptor) {
2034 // Get the needed push_descriptor limits
2035 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
2036 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
2037 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
2038 }
2039
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002040 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002041 VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing_prop;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002042 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
2043 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
2044 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
2045 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
2046 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
2047 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
2048 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
2049 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
2050 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
2051 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
2052 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
2053 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
2054 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
2055 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
2056 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
2057 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
2058 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
2059 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
2060 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
2061 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
2062 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
2063 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
2064 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
2065 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
2066 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
2067 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
2068 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
2069 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
2070 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
2071 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
2072 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
2073 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
2074 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
2075 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
2076 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
2077 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
2078 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
2079 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
2080 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
2081 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
2082 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
2083 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
2084 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
2085 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
2086 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
2087 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
2088 }
2089
locke-lunargd556cc32019-09-17 01:21:23 -06002090 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
2091 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
2092 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
2093 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002094
2095 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002096 VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002097 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
2098 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
2099 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
2100 depth_stencil_resolve_props.supportedStencilResolveModes;
2101 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
2102 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
2103 }
2104
locke-lunargd556cc32019-09-17 01:21:23 -06002105 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002106 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
sourav parmarcd5fb182020-07-17 12:58:44 -07002107 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing_pipeline, &phys_dev_props->ray_tracing_propsKHR);
2108 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_acceleration_structure, &phys_dev_props->acc_structure_props);
locke-lunargd556cc32019-09-17 01:21:23 -06002109 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
2110 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02002111 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002112 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07002113 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06002114 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
locke-lunarg3fa463a2020-10-23 16:39:04 -06002115 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_multiview, &phys_dev_props->multiview_props);
Nathaniel Cesario3291c912020-11-17 16:54:41 -07002116 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_portability_subset, &phys_dev_props->portability_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002117
2118 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002119 VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002120 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
2121 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
2122 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
2123 }
2124
2125 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002126 VkPhysicalDeviceFloatControlsProperties float_controls_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002127 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
2128 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
2129 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
2130 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
2131 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
2132 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
2133 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
2134 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
2135 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
2136 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
2137 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
2138 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
2139 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
2140 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
2141 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
2142 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
2143 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
2144 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
2145 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
2146 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
2147 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
2148 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07002149
locke-lunargd556cc32019-09-17 01:21:23 -06002150 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
2151 // Get the needed cooperative_matrix properties
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07002152 auto cooperative_matrix_props = LvlInitStruct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
2153 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&cooperative_matrix_props);
locke-lunargd556cc32019-09-17 01:21:23 -06002154 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
2155 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
2156
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002157 uint32_t num_cooperative_matrix_properties = 0;
2158 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties, NULL);
2159 state_tracker->cooperative_matrix_properties.resize(num_cooperative_matrix_properties,
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07002160 LvlInitStruct<VkCooperativeMatrixPropertiesNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002161
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002162 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties,
locke-lunargd556cc32019-09-17 01:21:23 -06002163 state_tracker->cooperative_matrix_properties.data());
2164 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002165 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06002166 // Get the needed subgroup limits
Locke Linf3873542021-04-26 11:25:10 -06002167 auto provoking_vertex_prop = LvlInitStruct<VkPhysicalDeviceProvokingVertexPropertiesEXT>();
2168 auto subgroup_prop = LvlInitStruct<VkPhysicalDeviceSubgroupProperties>(&provoking_vertex_prop);
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07002169 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&subgroup_prop);
locke-lunargd556cc32019-09-17 01:21:23 -06002170 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
2171
Piers Daniell41b8c5d2020-01-10 15:42:00 -07002172 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
2173 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
2174 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
2175 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
Locke Linf3873542021-04-26 11:25:10 -06002176 state_tracker->phys_dev_ext_props.provoking_vertex_props = provoking_vertex_prop;
locke-lunargd556cc32019-09-17 01:21:23 -06002177 }
2178
Tobias Hector6663c9b2020-11-05 10:18:02 +00002179 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_fragment_shading_rate, &phys_dev_props->fragment_shading_rate_props);
2180
locke-lunargd556cc32019-09-17 01:21:23 -06002181 // Store queue family data
2182 if (pCreateInfo->pQueueCreateInfos != nullptr) {
2183 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07002184 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
Jeremy Gebbencbf22862021-03-03 12:01:22 -07002185 state_tracker->queue_family_index_map.emplace(queue_create_info.queueFamilyIndex, queue_create_info.queueCount);
2186 state_tracker->queue_family_create_flags_map.emplace( queue_create_info.queueFamilyIndex, queue_create_info.flags);
locke-lunargd556cc32019-09-17 01:21:23 -06002187 }
2188 }
2189}
2190
// Tear down all device-level tracked state before the device handle disappears.
// Order matters: command buffers must be reset (to unlink their object_bindings)
// before any of the maps holding the objects they reference are cleared.
void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &command_buffer : commandBufferMap) {
        ResetCommandBufferState(command_buffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}
2214
2215// Loop through bound objects and increment their in_use counts.
2216void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
John Zulauf79f06582021-02-27 18:38:39 -07002217 for (const auto &obj : cb_node->object_bindings) {
locke-lunargd556cc32019-09-17 01:21:23 -06002218 auto base_obj = GetStateStructPtrFromObject(obj);
2219 if (base_obj) {
2220 base_obj->in_use.fetch_add(1);
2221 }
2222 }
2223}
2224
2225// Track which resources are in-flight by atomically incrementing their "in_use" count
2226void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
2227 cb_node->submitCount++;
2228 cb_node->in_use.fetch_add(1);
2229
2230 // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
2231 IncrementBoundObjects(cb_node);
2232 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
2233 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
2234 // should then be flagged prior to calling this function
2235 for (auto event : cb_node->writeEventsBeforeWait) {
2236 auto event_state = GetEventState(event);
2237 if (event_state) event_state->write_in_use++;
2238 }
2239}
2240
2241// Decrement in-use count for objects bound to command buffer
2242void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
2243 BASE_NODE *base_obj = nullptr;
John Zulauf79f06582021-02-27 18:38:39 -07002244 for (const auto &obj : cb_node->object_bindings) {
locke-lunargd556cc32019-09-17 01:21:23 -06002245 base_obj = GetStateStructPtrFromObject(obj);
2246 if (base_obj) {
2247 base_obj->in_use.fetch_sub(1);
2248 }
2249 }
2250}
2251
// Retire (complete) all submissions on pQueue up to sequence number `seq`.
// Decrements in_use counts added at submit time, resolves query/event state recorded
// by the retired command buffers, marks fences retired, and then recursively rolls
// forward any other queues / timeline semaphores this work was observed to wait on.
void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    // Highest sequence each other queue was waited on, and highest payload each
    // timeline semaphore was waited on — collected while retiring, replayed at the end.
    layer_data::unordered_map<VkQueue, uint64_t> other_queue_seqs;
    layer_data::unordered_map<VkSemaphore, uint64_t> timeline_semaphore_counters;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto semaphore_state = GetSemaphoreState(wait.semaphore);
            if (semaphore_state) {
                semaphore_state->in_use.fetch_sub(1);
            }
            if (wait.type == VK_SEMAPHORE_TYPE_TIMELINE) {
                // Remember the largest payload waited on per timeline semaphore.
                auto &last_counter = timeline_semaphore_counters[wait.semaphore];
                last_counter = std::max(last_counter, wait.payload);
            } else {
                // Binary semaphore: remember the signaling queue's sequence we depended on.
                auto &last_seq = other_queue_seqs[wait.queue];
                last_seq = std::max(last_seq, wait.seq);
            }
        }

        for (auto &signal : submission.signalSemaphores) {
            auto semaphore_state = GetSemaphoreState(signal.semaphore);
            if (semaphore_state) {
                semaphore_state->in_use.fetch_sub(1);
                // Timeline payloads are monotonic; only advance, never regress.
                if (semaphore_state->type == VK_SEMAPHORE_TYPE_TIMELINE && semaphore_state->payload < signal.payload) {
                    semaphore_state->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto semaphore_state = GetSemaphoreState(semaphore);
            if (semaphore_state) {
                semaphore_state->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto event_node = eventMap.find(event);
                if (event_node != eventMap.end()) {
                    event_node->second->write_in_use--;
                }
            }
            // Replay the command buffer's deferred query updates into a local map
            // (do_validate=false: record-only pass at retirement time).
            QueryMap local_query_to_state_map;
            VkQueryPool first_pool = VK_NULL_HANDLE;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
            }

            // Queries that ENDED on the GPU are now AVAILABLE to the host.
            for (const auto &query_state_pair : local_query_to_state_map) {
                if (query_state_pair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[query_state_pair.first] = QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        auto fence_state = GetFenceState(submission.fence);
        if (fence_state && fence_state->scope == kSyncScopeInternal) {
            fence_state->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (const auto &qs : other_queue_seqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
    for (const auto &sc : timeline_semaphore_counters) {
        RetireTimelineSemaphore(sc.first, sc.second);
    }
}
2335
2336// Submit a fence to a queue, delimiting previous fences and previous untracked
2337// work by it.
2338static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
2339 pFence->state = FENCE_INFLIGHT;
2340 pFence->signaler.first = pQueue->queue;
2341 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
2342}
2343
// Record the fence side of a queue submission. Returns a nonzero sequence number if
// the caller should retire work early (external-scope fence whose wait we will not see),
// otherwise 0.
uint64_t ValidationStateTracker::RecordSubmitFence(QUEUE_STATE *queue_state, VkFence fence, uint32_t submit_count) {
    auto fence_state = GetFenceState(fence);
    uint64_t early_retire_seq = 0;
    if (fence_state) {
        if (fence_state->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(queue_state, fence_state, std::max(1u, submit_count));
            if (!submit_count) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                CB_SUBMISSION submission;
                submission.fence = fence;
                queue_state->submissions.emplace_back(std::move(submission));
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = queue_state->seq + queue_state->submissions.size();
        }
    }
    return early_retire_seq;
}
2366
// Record one command buffer (and its linked secondaries) into a pending submission:
// adds them to the submission's cb list, increments their in-flight counts, and applies
// their deferred query/event updates to the tracker's global maps.
void ValidationStateTracker::RecordSubmitCommandBuffer(CB_SUBMISSION &submission, VkCommandBuffer command_buffer) {
    auto cb_node = GetCBState(command_buffer);
    if (cb_node) {
        submission.cbs.push_back(command_buffer);
        // Secondaries executed by this primary are in flight with it.
        for (auto *secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
            submission.cbs.push_back(secondary_cmd_buffer->commandBuffer);
            IncrementResources(secondary_cmd_buffer);
        }
        IncrementResources(cb_node);

        VkQueryPool first_pool = VK_NULL_HANDLE;
        EventToStageMap local_event_to_stage_map;
        QueryMap local_query_to_state_map;
        // Replay deferred updates (do_validate=false: record-only pass at submit time).
        for (auto &function : cb_node->queryUpdates) {
            function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
        }

        for (const auto &query_state_pair : local_query_to_state_map) {
            queryToStateMap[query_state_pair.first] = query_state_pair.second;
        }

        for (const auto &function : cb_node->eventUpdates) {
            function(nullptr, /*do_validate*/ false, &local_event_to_stage_map);
        }

        for (const auto &eventStagePair : local_event_to_stage_map) {
            eventMap[eventStagePair.first]->stageMask = eventStagePair.second;
        }
    }
}
2397
// Record one wait semaphore of a pending submission. Binary semaphores link the wait to
// the (queue, seq) that last signaled them and are consumed (un-signaled) here; timeline
// semaphores record a wait for `value` against this queue at `next_seq`. Semaphores with
// external scope are tracked separately since their signal is not visible to the tracker.
void ValidationStateTracker::RecordSubmitWaitSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
                                                       uint64_t value, uint64_t next_seq) {
    auto semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state) {
        if (semaphore_state->scope == kSyncScopeInternal) {
            SEMAPHORE_WAIT wait;
            wait.semaphore = semaphore;
            wait.type = semaphore_state->type;
            if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
                // Only record a dependency if something actually signaled this semaphore.
                if (semaphore_state->signaler.first != VK_NULL_HANDLE) {
                    wait.queue = semaphore_state->signaler.first;
                    wait.seq = semaphore_state->signaler.second;
                    submission.waitSemaphores.emplace_back(std::move(wait));
                    semaphore_state->in_use.fetch_add(1);
                }
                // A binary wait consumes the signal.
                semaphore_state->signaler.first = VK_NULL_HANDLE;
                semaphore_state->signaled = false;
            } else if (semaphore_state->payload < value) {
                // Timeline: only an actual wait (payload not yet reached) is recorded.
                wait.queue = queue;
                wait.seq = next_seq;
                wait.payload = value;
                submission.waitSemaphores.emplace_back(std::move(wait));
                semaphore_state->in_use.fetch_add(1);
            }
        } else {
            submission.externalSemaphores.push_back(semaphore);
            semaphore_state->in_use.fetch_add(1);
            // Temporary external scope reverts to internal after one wait.
            if (semaphore_state->scope == kSyncScopeExternalTemporary) {
                semaphore_state->scope = kSyncScopeInternal;
            }
        }
    }
}
2431
// Record one signal semaphore of a pending submission. Returns true when the caller
// should retire work early (semaphore has external scope, so the matching wait will
// never be observed by the tracker).
bool ValidationStateTracker::RecordSubmitSignalSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
                                                         uint64_t value, uint64_t next_seq) {
    bool retire_early = false;
    auto semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state) {
        if (semaphore_state->scope == kSyncScopeInternal) {
            SEMAPHORE_SIGNAL signal;
            signal.semaphore = semaphore;
            signal.seq = next_seq;
            if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
                // Binary: remember who signals, so a later wait can link back to this queue/seq.
                semaphore_state->signaler.first = queue;
                semaphore_state->signaler.second = next_seq;
                semaphore_state->signaled = true;
            } else {
                // Timeline: the payload is applied when the submission retires.
                signal.payload = value;
            }
            semaphore_state->in_use.fetch_add(1);
            submission.signalSemaphores.emplace_back(std::move(signal));
        } else {
            // Retire work up until this submit early, we will not see the wait that corresponds to this signal
            retire_early = true;
        }
    }
    return retire_early;
}
2457
// Record the state-tracking side effects of a successful vkQueueSubmit: fence bookkeeping,
// per-submit wait/signal semaphores (with timeline values from the pNext chain), and
// in-flight accounting for every submitted command buffer.
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;  // Failed submits leave no tracked state
    auto queue_state = GetQueueState(queue);

    uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        CB_SUBMISSION submission;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        // Sequence number this submission will occupy once appended to the queue.
        const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
        // Timeline semaphore values, if any, arrive via a pNext extension struct.
        auto *timeline_semaphore_submit = LvlFindInChain<VkTimelineSemaphoreSubmitInfo>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            uint64_t value = 0;
            if (timeline_semaphore_submit && timeline_semaphore_submit->pWaitSemaphoreValues != nullptr &&
                (i < timeline_semaphore_submit->waitSemaphoreValueCount)) {
                value = timeline_semaphore_submit->pWaitSemaphoreValues[i];
            }
            RecordSubmitWaitSemaphore(submission, queue, submit->pWaitSemaphores[i], value, next_seq);
        }

        bool retire_early = false;
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            uint64_t value = 0;
            if (timeline_semaphore_submit && timeline_semaphore_submit->pSignalSemaphoreValues != nullptr &&
                (i < timeline_semaphore_submit->signalSemaphoreValueCount)) {
                value = timeline_semaphore_submit->pSignalSemaphoreValues[i];
            }
            retire_early |= RecordSubmitSignalSemaphore(submission, queue, submit->pSignalSemaphores[i], value, next_seq);
        }
        if (retire_early) {
            early_retire_seq = std::max(early_retire_seq, next_seq);
        }

        const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            RecordSubmitCommandBuffer(submission, submit->pCommandBuffers[i]);
        }
        // Only the last submission of the batch is delimited by the fence.
        submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
        queue_state->submissions.emplace_back(std::move(submission));
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(queue_state, early_retire_seq);
    }
}
2507
2508void ValidationStateTracker::PostCallRecordQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
2509 VkFence fence, VkResult result) {
2510 if (result != VK_SUCCESS) return;
2511 auto queue_state = GetQueueState(queue);
2512
2513 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);
2514
2515 // Now process each individual submit
2516 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
2517 CB_SUBMISSION submission;
2518 const VkSubmitInfo2KHR *submit = &pSubmits[submit_idx];
2519 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
2520 for (uint32_t i = 0; i < submit->waitSemaphoreInfoCount; ++i) {
2521 const auto &sem_info = submit->pWaitSemaphoreInfos[i];
2522 RecordSubmitWaitSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
2523 }
2524 bool retire_early = false;
2525 for (uint32_t i = 0; i < submit->signalSemaphoreInfoCount; ++i) {
2526 const auto &sem_info = submit->pSignalSemaphoreInfos[i];
2527 retire_early |= RecordSubmitSignalSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
2528 }
2529 if (retire_early) {
2530 early_retire_seq = std::max(early_retire_seq, next_seq);
2531 }
2532 const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
2533 submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;
2534
2535 for (uint32_t i = 0; i < submit->commandBufferInfoCount; i++) {
2536 RecordSubmitCommandBuffer(submission, submit->pCommandBufferInfos[i].commandBuffer);
2537 }
2538 submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
2539 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06002540 }
2541
2542 if (early_retire_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002543 RetireWorkOnQueue(queue_state, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06002544 }
2545}
2546
2547void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
2548 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
2549 VkResult result) {
2550 if (VK_SUCCESS == result) {
2551 AddMemObjInfo(device, *pMemory, pAllocateInfo);
2552 }
2553 return;
2554}
2555
// Stop tracking a device-memory object before vkFreeMemory destroys it: unbind it from
// every resource bound to it, invalidate command buffers that reference it, release its
// fake address-space reservation, and drop it from memObjMap.
void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureStateNV(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            // Iterator is snapshotted before a possible erase so the loop stays valid.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    fake_memory.Free(mem_info->fake_base_address);
    memObjMap.erase(mem);
}
2603
// vkQueueBindSparse: record the state side effects of a successful sparse bind.
// Each VkBindSparseInfo element is tracked as one CB_SUBMISSION on the queue so
// its wait/signal semaphores retire in order with regular queue submissions.
void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    auto queue_state = GetQueueState(queue);

    // NOTE(review): presumably non-zero when the fence cannot be waited on
    // normally, forcing work up to that sequence to retire immediately -- confirm
    // against RecordSubmitFence.
    uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, bindInfoCount);

    for (uint32_t bind_idx = 0; bind_idx < bindInfoCount; ++bind_idx) {
        const VkBindSparseInfo &bind_info = pBindInfo[bind_idx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bind_info.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bind_info.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bind_info.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bind_info.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bind_info.imageBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bind_info.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        CB_SUBMISSION submission;
        // Sequence number this submission will occupy once appended to the queue
        const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
        for (uint32_t i = 0; i < bind_info.waitSemaphoreCount; ++i) {
            RecordSubmitWaitSemaphore(submission, queue, bind_info.pWaitSemaphores[i], 0, next_seq);
        }
        bool retire_early = false;
        for (uint32_t i = 0; i < bind_info.signalSemaphoreCount; ++i) {
            retire_early |= RecordSubmitSignalSemaphore(submission, queue, bind_info.pSignalSemaphores[i], 0, next_seq);
        }
        // Retire work up until this submit early, we will not see the wait that corresponds to this signal
        if (retire_early) {
            early_retire_seq = std::max(early_retire_seq, queue_state->seq + queue_state->submissions.size() + 1);
        }

        // Only the final bind-info carries the fence, mirroring where the fence signals
        submission.fence = bind_idx == (bindInfoCount - 1) ? fence : VK_NULL_HANDLE;
        queue_state->submissions.emplace_back(std::move(submission));
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(queue_state, early_retire_seq);
    }
}
2659
2660void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
2661 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
2662 VkResult result) {
2663 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002664 auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002665 semaphore_state->signaler.first = VK_NULL_HANDLE;
2666 semaphore_state->signaler.second = 0;
2667 semaphore_state->signaled = false;
2668 semaphore_state->scope = kSyncScopeInternal;
Mike Schuchardt2df08912020-12-15 16:28:09 -08002669 semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002670 semaphore_state->payload = 0;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002671 auto semaphore_type_create_info = LvlFindInChain<VkSemaphoreTypeCreateInfo>(pCreateInfo->pNext);
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002672 if (semaphore_type_create_info) {
2673 semaphore_state->type = semaphore_type_create_info->semaphoreType;
2674 semaphore_state->payload = semaphore_type_create_info->initialValue;
2675 }
locke-lunargd556cc32019-09-17 01:21:23 -06002676 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
2677}
2678
Mike Schuchardt2df08912020-12-15 16:28:09 -08002679void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBits handle_type,
2680 VkSemaphoreImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06002681 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
2682 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002683 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06002684 sema_node->scope == kSyncScopeInternal) {
2685 sema_node->scope = kSyncScopeExternalTemporary;
2686 } else {
2687 sema_node->scope = kSyncScopeExternalPermanent;
2688 }
2689 }
2690}
2691
Mike Schuchardt2df08912020-12-15 16:28:09 -08002692void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo,
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002693 VkResult result) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002694 auto *semaphore_state = GetSemaphoreState(pSignalInfo->semaphore);
2695 semaphore_state->payload = pSignalInfo->value;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002696}
2697
locke-lunargd556cc32019-09-17 01:21:23 -06002698void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
2699 auto mem_info = GetDevMemState(mem);
2700 if (mem_info) {
2701 mem_info->mapped_range.offset = offset;
2702 mem_info->mapped_range.size = size;
2703 mem_info->p_driver_data = *ppData;
2704 }
2705}
2706
2707void ValidationStateTracker::RetireFence(VkFence fence) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002708 auto fence_state = GetFenceState(fence);
2709 if (fence_state && fence_state->scope == kSyncScopeInternal) {
2710 if (fence_state->signaler.first != VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06002711 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002712 RetireWorkOnQueue(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002713 } else {
2714 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
2715 // the fence as retired.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002716 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06002717 }
2718 }
2719}
2720
2721void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2722 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2723 if (VK_SUCCESS != result) return;
2724
2725 // When we know that all fences are complete we can clean/remove their CBs
2726 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2727 for (uint32_t i = 0; i < fenceCount; i++) {
2728 RetireFence(pFences[i]);
2729 }
2730 }
2731 // NOTE : Alternate case not handled here is when some fences have completed. In
2732 // this case for app to guarantee which fences completed it will have to call
2733 // vkGetFenceStatus() at which point we'll clean/remove their CBs if complete.
2734}
2735
// Retire queue work proven complete by a timeline semaphore reaching (at least)
// 'until_payload'.  For every tracked queue, find the latest submission whose
// signal of this semaphore has payload <= until_payload and retire everything up
// to that submission's sequence number.
void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
    auto semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state) {
        for (auto &pair : queueMap) {
            QUEUE_STATE &queue_state = pair.second;
            // Highest submission sequence on this queue covered by the payload
            uint64_t max_seq = 0;
            for (const auto &submission : queue_state.submissions) {
                for (const auto &signal_semaphore : submission.signalSemaphores) {
                    if (signal_semaphore.semaphore == semaphore && signal_semaphore.payload <= until_payload) {
                        if (signal_semaphore.seq > max_seq) {
                            max_seq = signal_semaphore.seq;
                        }
                    }
                }
            }
            if (max_seq) {
                RetireWorkOnQueue(&queue_state, max_seq);
            }
        }
    }
}
2757
John Zulauff89de662020-04-13 18:57:34 -06002758void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2759 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002760 if (VK_SUCCESS != result) return;
2761
2762 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2763 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2764 }
2765}
2766
John Zulauff89de662020-04-13 18:57:34 -06002767void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2768 VkResult result) {
2769 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2770}
2771
2772void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
2773 uint64_t timeout, VkResult result) {
2774 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2775}
2776
Adrian Coca Lorentec7d76102020-09-28 13:58:16 +02002777void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2778 VkResult result) {
2779 if (VK_SUCCESS != result) return;
2780
2781 RetireTimelineSemaphore(semaphore, *pValue);
2782}
2783
2784void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2785 VkResult result) {
2786 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2787}
2788void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2789 VkResult result) {
2790 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2791}
2792
locke-lunargd556cc32019-09-17 01:21:23 -06002793void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2794 if (VK_SUCCESS != result) return;
2795 RetireFence(fence);
2796}
2797
2798void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
2799 // Add queue to tracking set only if it is new
2800 auto queue_is_new = queues.emplace(queue);
2801 if (queue_is_new.second == true) {
2802 QUEUE_STATE *queue_state = &queueMap[queue];
2803 queue_state->queue = queue;
2804 queue_state->queueFamilyIndex = queue_family_index;
2805 queue_state->seq = 0;
2806 }
2807}
2808
2809void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
2810 VkQueue *pQueue) {
2811 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
2812}
2813
2814void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
2815 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
2816}
2817
2818void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2819 if (VK_SUCCESS != result) return;
2820 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002821 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002822}
2823
2824void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2825 if (VK_SUCCESS != result) return;
2826 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002827 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002828 }
2829}
2830
2831void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2832 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002833 auto fence_state = GetFenceState(fence);
2834 fence_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002835 fenceMap.erase(fence);
2836}
2837
2838void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2839 const VkAllocationCallbacks *pAllocator) {
2840 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002841 auto semaphore_state = GetSemaphoreState(semaphore);
2842 semaphore_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002843 semaphoreMap.erase(semaphore);
2844}
2845
2846void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2847 if (!event) return;
John Zulauf48057322020-12-02 11:59:31 -07002848 EVENT_STATE *event_state = Get<EVENT_STATE>(event);
locke-lunargd556cc32019-09-17 01:21:23 -06002849 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
2850 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
John Zulauf48057322020-12-02 11:59:31 -07002851 event_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002852 eventMap.erase(event);
2853}
2854
2855void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2856 const VkAllocationCallbacks *pAllocator) {
2857 if (!queryPool) return;
2858 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
2859 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
2860 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002861 qp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002862 queryPoolMap.erase(queryPool);
2863}
2864
John Zulaufd13b38e2021-03-05 08:17:38 -07002865void ValidationStateTracker::InsertImageMemoryRange(IMAGE_STATE *image_state, DEVICE_MEMORY_STATE *mem_info,
2866 VkDeviceSize mem_offset) {
2867 mem_info->bound_images.insert(image_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002868}
2869
John Zulaufd13b38e2021-03-05 08:17:38 -07002870void ValidationStateTracker::RemoveImageMemoryRange(IMAGE_STATE *image_state, DEVICE_MEMORY_STATE *mem_info) {
2871 mem_info->bound_images.erase(image_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002872}
2873
locke-lunargd556cc32019-09-17 01:21:23 -06002874void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2875 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2876 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002877 // Track objects tied to memory
2878 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
2879 }
2880}
2881
2882void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
2883 VkDeviceSize memoryOffset, VkResult result) {
2884 if (VK_SUCCESS != result) return;
2885 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
2886}
2887
2888void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002889 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002890 for (uint32_t i = 0; i < bindInfoCount; i++) {
2891 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2892 }
2893}
2894
2895void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002896 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002897 for (uint32_t i = 0; i < bindInfoCount; i++) {
2898 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2899 }
2900}
2901
Spencer Fricke6c127102020-04-16 06:25:20 -07002902void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002903 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2904 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002905 buffer_state->memory_requirements_checked = true;
2906 }
2907}
2908
2909void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
2910 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002911 RecordGetBufferMemoryRequirementsState(buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002912}
2913
2914void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002915 const VkBufferMemoryRequirementsInfo2 *pInfo,
2916 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002917 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002918}
2919
2920void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002921 const VkBufferMemoryRequirementsInfo2 *pInfo,
2922 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002923 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002924}
2925
Spencer Fricke6c127102020-04-16 06:25:20 -07002926void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002927 const VkImagePlaneMemoryRequirementsInfo *plane_info =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002928 (pInfo == nullptr) ? nullptr : LvlFindInChain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002929 IMAGE_STATE *image_state = GetImageState(image);
2930 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002931 if (plane_info != nullptr) {
2932 // Multi-plane image
2933 image_state->memory_requirements_checked = false; // Each image plane needs to be checked itself
2934 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
2935 image_state->plane0_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002936 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
2937 image_state->plane1_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002938 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
2939 image_state->plane2_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002940 }
2941 } else {
2942 // Single Plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002943 image_state->memory_requirements_checked = true;
2944 }
locke-lunargd556cc32019-09-17 01:21:23 -06002945 }
2946}
2947
2948void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
2949 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002950 RecordGetImageMemoryRequirementsState(image, nullptr);
locke-lunargd556cc32019-09-17 01:21:23 -06002951}
2952
2953void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2954 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002955 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002956}
2957
2958void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
2959 const VkImageMemoryRequirementsInfo2 *pInfo,
2960 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002961 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002962}
2963
2964static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2965 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2966 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2967 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2968 image_state->sparse_metadata_required = true;
2969 }
2970}
2971
2972void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2973 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2974 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2975 auto image_state = GetImageState(image);
2976 image_state->get_sparse_reqs_called = true;
2977 if (!pSparseMemoryRequirements) return;
2978 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2979 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2980 }
2981}
2982
2983void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002984 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2985 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002986 auto image_state = GetImageState(pInfo->image);
2987 image_state->get_sparse_reqs_called = true;
2988 if (!pSparseMemoryRequirements) return;
2989 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2990 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2991 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2992 }
2993}
2994
2995void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002996 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2997 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002998 auto image_state = GetImageState(pInfo->image);
2999 image_state->get_sparse_reqs_called = true;
3000 if (!pSparseMemoryRequirements) return;
3001 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
3002 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
3003 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
3004 }
3005}
3006
3007void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
3008 const VkAllocationCallbacks *pAllocator) {
3009 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003010 auto shader_module_state = GetShaderModuleState(shaderModule);
3011 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003012 shaderModuleMap.erase(shaderModule);
3013}
3014
3015void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
3016 const VkAllocationCallbacks *pAllocator) {
3017 if (!pipeline) return;
3018 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
3019 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
3020 // Any bound cmd buffers are now invalid
3021 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003022 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003023 pipelineMap.erase(pipeline);
3024}
3025
3026void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
3027 const VkAllocationCallbacks *pAllocator) {
3028 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003029 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
3030 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003031 pipelineLayoutMap.erase(pipelineLayout);
3032}
3033
3034void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
3035 const VkAllocationCallbacks *pAllocator) {
3036 if (!sampler) return;
3037 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
3038 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
3039 // Any bound cmd buffers are now invalid
3040 if (sampler_state) {
3041 InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
Yuly Novikov424cdd52020-05-26 16:45:12 -04003042
3043 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
3044 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
3045 custom_border_color_sampler_count--;
3046 }
3047
3048 sampler_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003049 }
3050 samplerMap.erase(sampler);
3051}
3052
3053void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
3054 const VkAllocationCallbacks *pAllocator) {
3055 if (!descriptorSetLayout) return;
3056 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
3057 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05003058 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003059 descriptorSetLayoutMap.erase(layout_it);
3060 }
3061}
3062
3063void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3064 const VkAllocationCallbacks *pAllocator) {
3065 if (!descriptorPool) return;
3066 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
3067 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
3068 if (desc_pool_state) {
3069 // Any bound cmd buffers are now invalid
3070 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
3071 // Free sets that were in this pool
John Zulauf79f06582021-02-27 18:38:39 -07003072 for (auto *ds : desc_pool_state->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06003073 FreeDescriptorSet(ds);
3074 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003075 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003076 descriptorPoolMap.erase(descriptorPool);
3077 }
3078}
3079
// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
// NOTE(review): pool_state is dereferenced unconditionally -- callers are
// expected to pass the owning pool's state; confirm at call sites.
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        // Allow any derived class to clean up command buffer state
        if (command_buffer_free_callback) {
            (*command_buffer_free_callback)(command_buffers[i]);
        }

        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}
3105
3106void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
3107 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003108 auto pool = GetCommandPoolState(commandPool);
3109 FreeCommandBufferStates(pool, commandBufferCount, pCommandBuffers);
locke-lunargd556cc32019-09-17 01:21:23 -06003110}
3111
3112void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
3113 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
3114 VkResult result) {
3115 if (VK_SUCCESS != result) return;
sfricke-samsungc1543372020-08-18 22:37:27 -07003116 VkCommandPool command_pool = *pCommandPool;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003117 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
sfricke-samsungc1543372020-08-18 22:37:27 -07003118 cmd_pool_state->commandPool = command_pool;
locke-lunargd556cc32019-09-17 01:21:23 -06003119 cmd_pool_state->createFlags = pCreateInfo->flags;
3120 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07003121 cmd_pool_state->unprotected = ((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) == 0);
sfricke-samsungc1543372020-08-18 22:37:27 -07003122 commandPoolMap[command_pool] = std::move(cmd_pool_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003123}
3124
// vkCreateQueryPool: build tracking state for the new query pool and initialize
// every query slot to QUERYSTATE_UNKNOWN.
void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
    query_pool_state->createInfo = *pCreateInfo;
    query_pool_state->pool = *pQueryPool;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        // NOTE(review): 'perf' is dereferenced without a null check -- assumes
        // valid usage guarantees a VkQueryPoolPerformanceCreateInfoKHR in the
        // pNext chain for performance pools; confirm.
        const auto *perf = LvlFindInChain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
        query_pool_state->perf_counter_index_count = perf->counterIndexCount;

        // Record which scopes the selected counters fall under
        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
            const auto &counter = counters.counters[perf->pCounterIndices[i]];
            switch (counter.scope) {
                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
                    query_pool_state->has_perf_scope_command_buffer = true;
                    break;
                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
                    query_pool_state->has_perf_scope_render_pass = true;
                    break;
                default:
                    break;
            }
        }

        // Ask the driver how many passes are needed to read all selected counters
        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
                                                                      &query_pool_state->n_performance_passes);
    }

    queryPoolMap[*pQueryPool] = std::move(query_pool_state);

    // Every query in the pool starts in the UNKNOWN state until reset/begun
    QueryObject query_obj{*pQueryPool, 0u};
    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
        query_obj.query = i;
        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
    }
}
3163
3164void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
3165 const VkAllocationCallbacks *pAllocator) {
3166 if (!commandPool) return;
3167 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
3168 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
3169 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
3170 if (cp_state) {
3171 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
3172 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
3173 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003174 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003175 commandPoolMap.erase(commandPool);
3176 }
3177}
3178
3179void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
3180 VkCommandPoolResetFlags flags, VkResult result) {
3181 if (VK_SUCCESS != result) return;
3182 // Reset all of the CBs allocated from this pool
3183 auto command_pool_state = GetCommandPoolState(commandPool);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003184 for (auto cmd_buffer : command_pool_state->commandBuffers) {
3185 ResetCommandBufferState(cmd_buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06003186 }
3187}
3188
3189void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
3190 VkResult result) {
3191 for (uint32_t i = 0; i < fenceCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003192 auto fence_state = GetFenceState(pFences[i]);
3193 if (fence_state) {
3194 if (fence_state->scope == kSyncScopeInternal) {
3195 fence_state->state = FENCE_UNSIGNALED;
3196 } else if (fence_state->scope == kSyncScopeExternalTemporary) {
3197 fence_state->scope = kSyncScopeInternal;
locke-lunargd556cc32019-09-17 01:21:23 -06003198 }
3199 }
3200 }
3201}
3202
// For given cb_nodes, invalidate them and track object causing invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
// can also unlink objects from command buffers.
void ValidationStateTracker::InvalidateCommandBuffers(BASE_NODE::BindingsType &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        // Downgrade the command buffer: a recording buffer can no longer be validly
        // completed, and a recorded one can no longer be validly submitted.
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        // Remember which object handle caused the invalidation (used in error reporting)
        cb_node->broken_bindings.push_back(obj);

        // if secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            // cb_node_pair.second is the index of obj within this command buffer's
            // object_bindings; clear that slot so the buffer no longer references obj.
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        // All per-buffer links were cleared above, so drop the whole binding map
        cb_nodes.clear();
    }
}
3232
Jeremy Gebbencbf22862021-03-03 12:01:22 -07003233void ValidationStateTracker::InvalidateLinkedCommandBuffers(layer_data::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
Jeff Bolzadbfa852019-10-04 13:53:30 -05003234 const VulkanTypedHandle &obj) {
John Zulauf79f06582021-02-27 18:38:39 -07003235 for (auto *cb_node : cb_nodes) {
locke-lunargd556cc32019-09-17 01:21:23 -06003236 if (cb_node->state == CB_RECORDING) {
3237 cb_node->state = CB_INVALID_INCOMPLETE;
3238 } else if (cb_node->state == CB_RECORDED) {
3239 cb_node->state = CB_INVALID_COMPLETE;
3240 }
3241 cb_node->broken_bindings.push_back(obj);
3242
3243 // if secondary, then propagate the invalidation to the primaries that will call us.
3244 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003245 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06003246 }
3247 }
3248}
3249
3250void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
3251 const VkAllocationCallbacks *pAllocator) {
3252 if (!framebuffer) return;
3253 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
3254 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
3255 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003256 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003257 frameBufferMap.erase(framebuffer);
3258}
3259
3260void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
3261 const VkAllocationCallbacks *pAllocator) {
3262 if (!renderPass) return;
3263 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
3264 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
3265 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003266 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003267 renderPassMap.erase(renderPass);
3268}
3269
3270void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
3271 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
3272 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003273 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003274 fence_state->fence = *pFence;
3275 fence_state->createInfo = *pCreateInfo;
3276 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
3277 fenceMap[*pFence] = std::move(fence_state);
3278}
3279
3280bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3281 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3282 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003283 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003284 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
3285 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
3286 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
3287 cgpl_state->pipe_state.reserve(count);
3288 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003289 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05003290 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003291 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003292 }
3293 return false;
3294}
3295
3296void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3297 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3298 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
3299 VkResult result, void *cgpl_state_data) {
3300 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
3301 // This API may create pipelines regardless of the return value
3302 for (uint32_t i = 0; i < count; i++) {
3303 if (pPipelines[i] != VK_NULL_HANDLE) {
3304 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3305 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
3306 }
3307 }
3308 cgpl_state->pipe_state.clear();
3309}
3310
3311bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3312 const VkComputePipelineCreateInfo *pCreateInfos,
3313 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003314 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003315 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
3316 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
3317 ccpl_state->pipe_state.reserve(count);
3318 for (uint32_t i = 0; i < count; i++) {
3319 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003320 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06003321 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003322 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003323 }
3324 return false;
3325}
3326
3327void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3328 const VkComputePipelineCreateInfo *pCreateInfos,
3329 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
3330 VkResult result, void *ccpl_state_data) {
3331 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
3332
3333 // This API may create pipelines regardless of the return value
3334 for (uint32_t i = 0; i < count; i++) {
3335 if (pPipelines[i] != VK_NULL_HANDLE) {
3336 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3337 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
3338 }
3339 }
3340 ccpl_state->pipe_state.clear();
3341}
3342
3343bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
3344 uint32_t count,
3345 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
3346 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003347 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003348 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
3349 crtpl_state->pipe_state.reserve(count);
3350 for (uint32_t i = 0; i < count; i++) {
3351 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003352 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003353 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003354 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003355 }
3356 return false;
3357}
3358
3359void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
3360 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
3361 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
3362 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
3363 // This API may create pipelines regardless of the return value
3364 for (uint32_t i = 0; i < count; i++) {
3365 if (pPipelines[i] != VK_NULL_HANDLE) {
3366 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3367 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3368 }
3369 }
3370 crtpl_state->pipe_state.clear();
3371}
3372
sourav parmarcd5fb182020-07-17 12:58:44 -07003373bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
3374 VkPipelineCache pipelineCache, uint32_t count,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003375 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3376 const VkAllocationCallbacks *pAllocator,
3377 VkPipeline *pPipelines, void *crtpl_state_data) const {
3378 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3379 crtpl_state->pipe_state.reserve(count);
3380 for (uint32_t i = 0; i < count; i++) {
3381 // Create and initialize internal tracking data structure
3382 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
3383 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
3384 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
3385 }
3386 return false;
3387}
3388
sourav parmarcd5fb182020-07-17 12:58:44 -07003389void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
3390 VkPipelineCache pipelineCache, uint32_t count,
3391 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3392 const VkAllocationCallbacks *pAllocator,
3393 VkPipeline *pPipelines, VkResult result,
3394 void *crtpl_state_data) {
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003395 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3396 // This API may create pipelines regardless of the return value
3397 for (uint32_t i = 0; i < count; i++) {
3398 if (pPipelines[i] != VK_NULL_HANDLE) {
3399 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3400 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3401 }
3402 }
3403 crtpl_state->pipe_state.clear();
3404}
3405
locke-lunargd556cc32019-09-17 01:21:23 -06003406void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
3407 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
3408 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003409 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003410 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
3411 pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
Tony-LunarG7337b312020-04-15 16:40:25 -06003412 custom_border_color_sampler_count++;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003413 }
locke-lunargd556cc32019-09-17 01:21:23 -06003414}
3415
3416void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
3417 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
3418 const VkAllocationCallbacks *pAllocator,
3419 VkDescriptorSetLayout *pSetLayout, VkResult result) {
3420 if (VK_SUCCESS != result) return;
3421 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
3422}
3423
3424// For repeatable sorting, not very useful for "memory in range" search
3425struct PushConstantRangeCompare {
3426 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
3427 if (lhs->offset == rhs->offset) {
3428 if (lhs->size == rhs->size) {
3429 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
3430 return lhs->stageFlags < rhs->stageFlags;
3431 }
3432 // If the offsets are the same then sorting by the end of range is useful for validation
3433 return lhs->size < rhs->size;
3434 }
3435 return lhs->offset < rhs->offset;
3436 }
3437};
3438
// Interning dictionary: equivalent sets of push constant ranges share one canonical id
static PushConstantRangesDict push_constant_ranges_dict;
3440
3441PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
3442 if (!info->pPushConstantRanges) {
3443 // Hand back the empty entry (creating as needed)...
3444 return push_constant_ranges_dict.look_up(PushConstantRanges());
3445 }
3446
3447 // Sort the input ranges to ensure equivalent ranges map to the same id
3448 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
3449 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
3450 sorted.insert(info->pPushConstantRanges + i);
3451 }
3452
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07003453 PushConstantRanges ranges;
3454 ranges.reserve(sorted.size());
John Zulauf79f06582021-02-27 18:38:39 -07003455 for (const auto *range : sorted) {
locke-lunargd556cc32019-09-17 01:21:23 -06003456 ranges.emplace_back(*range);
3457 }
3458 return push_constant_ranges_dict.look_up(std::move(ranges));
3459}
3460
// Dictionary of canonical form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;
3466
// Intern a "compatible for set N" record (set index + push constant ranges id +
// set layouts id) and return its canonical id for trivial-accept comparisons.
static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}
3471
3472void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
3473 const VkAllocationCallbacks *pAllocator,
3474 VkPipelineLayout *pPipelineLayout, VkResult result) {
3475 if (VK_SUCCESS != result) return;
3476
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003477 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003478 pipeline_layout_state->layout = *pPipelineLayout;
3479 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
3480 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
3481 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05003482 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003483 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
3484 }
3485
3486 // Get canonical form IDs for the "compatible for set" contents
3487 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
3488 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
3489 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
3490
3491 // Create table of "compatible for set N" cannonical forms for trivial accept validation
3492 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
3493 pipeline_layout_state->compat_for_set.emplace_back(
3494 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
3495 }
3496 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
3497}
3498
3499void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
3500 const VkAllocationCallbacks *pAllocator,
3501 VkDescriptorPool *pDescriptorPool, VkResult result) {
3502 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003503 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003504}
3505
3506void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3507 VkDescriptorPoolResetFlags flags, VkResult result) {
3508 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003509 DESCRIPTOR_POOL_STATE *pool = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06003510 // TODO: validate flags
3511 // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
John Zulauf79f06582021-02-27 18:38:39 -07003512 for (auto *ds : pool->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06003513 FreeDescriptorSet(ds);
3514 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003515 pool->sets.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06003516 // Reset available count for each type and available sets for this pool
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003517 for (auto it = pool->availableDescriptorTypeCount.begin(); it != pool->availableDescriptorTypeCount.end(); ++it) {
3518 pool->availableDescriptorTypeCount[it->first] = pool->maxDescriptorTypeCount[it->first];
locke-lunargd556cc32019-09-17 01:21:23 -06003519 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003520 pool->availableSets = pool->maxSets;
locke-lunargd556cc32019-09-17 01:21:23 -06003521}
3522
3523bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
3524 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003525 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003526 // Always update common data
3527 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3528 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3529 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
3530
3531 return false;
3532}
3533
3534// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
3535void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3536 VkDescriptorSet *pDescriptorSets, VkResult result,
3537 void *ads_state_data) {
3538 if (VK_SUCCESS != result) return;
3539 // All the updates are contained in a single cvdescriptorset function
3540 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3541 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3542 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
3543}
3544
3545void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
3546 const VkDescriptorSet *pDescriptorSets) {
3547 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
3548 // Update available descriptor sets in pool
3549 pool_state->availableSets += count;
3550
3551 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
3552 for (uint32_t i = 0; i < count; ++i) {
3553 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
3554 auto descriptor_set = setMap[pDescriptorSets[i]].get();
3555 uint32_t type_index = 0, descriptor_count = 0;
3556 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
3557 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
3558 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
3559 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
3560 }
3561 FreeDescriptorSet(descriptor_set);
3562 pool_state->sets.erase(descriptor_set);
3563 }
3564 }
3565}
3566
// Record descriptor writes and copies; all state updates are delegated to a
// single cvdescriptorset helper.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}
3574
3575void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
3576 VkCommandBuffer *pCommandBuffer, VkResult result) {
3577 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003578 auto pool = GetCommandPoolShared(pCreateInfo->commandPool);
3579 if (pool) {
locke-lunargd556cc32019-09-17 01:21:23 -06003580 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
3581 // Add command buffer to its commandPool map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003582 pool->commandBuffers.insert(pCommandBuffer[i]);
3583 auto cb_state = std::make_shared<CMD_BUFFER_STATE>();
3584 cb_state->createInfo = *pCreateInfo;
3585 cb_state->command_pool = pool;
3586 cb_state->unprotected = pool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06003587 // Add command buffer to map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003588 commandBufferMap[pCommandBuffer[i]] = std::move(cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003589 ResetCommandBufferState(pCommandBuffer[i]);
3590 }
3591 }
3592}
3593
3594// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
3595void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003596 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003597 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07003598 // If imageless fb, skip fb binding
Mike Schuchardt2df08912020-12-15 16:28:09 -08003599 if (!fb_state || fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003600 const uint32_t attachment_count = fb_state->createInfo.attachmentCount;
3601 for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
locke-lunargfc78e932020-11-19 17:06:24 -07003602 auto view_state = GetActiveAttachmentImageViewState(cb_state, attachment);
locke-lunargd556cc32019-09-17 01:21:23 -06003603 if (view_state) {
3604 AddCommandBufferBindingImageView(cb_state, view_state);
3605 }
3606 }
3607}
3608
locke-lunargfc78e932020-11-19 17:06:24 -07003609void UpdateSubpassAttachments(const safe_VkSubpassDescription2 &subpass, std::vector<SUBPASS_INFO> &subpasses) {
3610 for (uint32_t index = 0; index < subpass.inputAttachmentCount; ++index) {
3611 const uint32_t attachment_index = subpass.pInputAttachments[index].attachment;
3612 if (attachment_index != VK_ATTACHMENT_UNUSED) {
3613 subpasses[attachment_index].used = true;
3614 subpasses[attachment_index].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
3615 subpasses[attachment_index].layout = subpass.pInputAttachments[index].layout;
3616 }
3617 }
3618
3619 for (uint32_t index = 0; index < subpass.colorAttachmentCount; ++index) {
3620 const uint32_t attachment_index = subpass.pColorAttachments[index].attachment;
3621 if (attachment_index != VK_ATTACHMENT_UNUSED) {
3622 subpasses[attachment_index].used = true;
3623 subpasses[attachment_index].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
3624 subpasses[attachment_index].layout = subpass.pColorAttachments[index].layout;
3625 }
3626 if (subpass.pResolveAttachments) {
3627 const uint32_t attachment_index2 = subpass.pResolveAttachments[index].attachment;
3628 if (attachment_index2 != VK_ATTACHMENT_UNUSED) {
3629 subpasses[attachment_index2].used = true;
3630 subpasses[attachment_index2].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
3631 subpasses[attachment_index2].layout = subpass.pResolveAttachments[index].layout;
3632 }
3633 }
3634 }
3635
3636 if (subpass.pDepthStencilAttachment) {
3637 const uint32_t attachment_index = subpass.pDepthStencilAttachment->attachment;
3638 if (attachment_index != VK_ATTACHMENT_UNUSED) {
3639 subpasses[attachment_index].used = true;
3640 subpasses[attachment_index].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
3641 subpasses[attachment_index].layout = subpass.pDepthStencilAttachment->layout;
3642 }
3643 }
3644}
3645
3646void UpdateAttachmentsView(ValidationStateTracker &tracker, CMD_BUFFER_STATE &cb_state, const FRAMEBUFFER_STATE &framebuffer,
3647 const VkRenderPassBeginInfo *pRenderPassBegin) {
3648 auto &attachments = *(cb_state.active_attachments.get());
3649 const bool imageless = (framebuffer.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) ? true : false;
3650 const VkRenderPassAttachmentBeginInfo *attachment_info_struct = nullptr;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07003651 if (pRenderPassBegin) attachment_info_struct = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
locke-lunargfc78e932020-11-19 17:06:24 -07003652
3653 for (uint32_t i = 0; i < attachments.size(); ++i) {
3654 if (imageless) {
3655 if (attachment_info_struct && i < attachment_info_struct->attachmentCount) {
3656 auto res = cb_state.attachments_view_states.insert(
3657 tracker.GetShared<IMAGE_VIEW_STATE>(attachment_info_struct->pAttachments[i]));
3658 attachments[i] = res.first->get();
3659 }
3660 } else {
3661 auto res = cb_state.attachments_view_states.insert(framebuffer.attachments_view_state[i]);
3662 attachments[i] = res.first->get();
3663 }
3664 }
3665}
3666
// Record state for vkBeginCommandBuffer: (implicitly) reset a previously recorded buffer,
// capture the begin/inheritance info, and — for secondary buffers continuing a render pass —
// pull the inherited render pass / framebuffer / viewport-depth state into the CB state.
void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;

    // Beginning an already-recorded (or invalidated-after-recording) buffer performs an implicit reset.
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo && (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
        // Deep-copy the inheritance info so beginInfo does not dangle on the caller's struct.
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command-buffer and inheriting. Update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;

            // The inherited framebuffer is optional; only track attachment state when one is given.
            if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
                cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
                cb_state->active_subpasses = nullptr;
                cb_state->active_attachments = nullptr;

                if (cb_state->activeFramebuffer) {
                    cb_state->framebuffers.insert(cb_state->activeFramebuffer);

                    // Set cb_state->active_subpasses
                    cb_state->active_subpasses =
                        std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
                    const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
                    UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);

                    // Set cb_state->active_attachments & cb_state->attachments_view_states
                    cb_state->active_attachments =
                        std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
                    UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, nullptr);

                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, cb_state->activeFramebuffer.get());
                }
            }

            // Check for VkCommandBufferInheritanceViewportScissorInfoNV (VK_NV_inherited_viewport_scissor)
            auto p_inherited_viewport_scissor_info =
                LvlFindInChain<VkCommandBufferInheritanceViewportScissorInfoNV>(cb_state->beginInfo.pInheritanceInfo->pNext);
            if (p_inherited_viewport_scissor_info != nullptr && p_inherited_viewport_scissor_info->viewportScissor2D) {
                // Copy the inherited per-viewport depth ranges into CB state for later draw-time checks.
                auto pViewportDepths = p_inherited_viewport_scissor_info->pViewportDepths;
                cb_state->inheritedViewportDepths.assign(
                    pViewportDepths, pViewportDepths + p_inherited_viewport_scissor_info->viewportDepthCount);
            }
        }
    }

    // Device-group begin info narrows the initial device mask; otherwise all physical devices are enabled.
    auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    // Snapshot whether the profiling lock was held when recording began (VK_KHR_performance_query).
    cb_state->performance_lock_acquired = performance_lock_acquired;
}
3731
3732void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
3733 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3734 if (!cb_state) return;
3735 // Cached validation is specific to a specific recording of a specific command buffer.
John Zulauf79f06582021-02-27 18:38:39 -07003736 for (auto *descriptor_set : cb_state->validated_descriptor_sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06003737 descriptor_set->ClearCachedValidation(cb_state);
3738 }
3739 cb_state->validated_descriptor_sets.clear();
3740 if (VK_SUCCESS == result) {
3741 cb_state->state = CB_RECORDED;
3742 }
3743}
3744
3745void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
3746 VkResult result) {
3747 if (VK_SUCCESS == result) {
3748 ResetCommandBufferState(commandBuffer);
3749 }
3750}
3751
3752CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
3753 // initially assume everything is static state
3754 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
3755
3756 if (ds) {
3757 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
locke-lunarg4189aa22020-10-21 00:23:48 -06003758 flags &= ~ConvertToCBStatusFlagBits(ds->pDynamicStates[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003759 }
3760 }
locke-lunargd556cc32019-09-17 01:21:23 -06003761 return flags;
3762}
3763
3764// Validation cache:
3765// CV is the bottommost implementor of this extension. Don't pass calls down.
3766// utility function to set collective state for pipeline
3767void SetPipelineState(PIPELINE_STATE *pPipe) {
3768 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
3769 if (pPipe->graphicsPipelineCI.pColorBlendState) {
3770 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
3771 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
3772 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3773 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3774 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3775 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3776 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3777 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3778 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3779 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
3780 pPipe->blendConstantsEnabled = true;
3781 }
3782 }
3783 }
3784 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07003785 // Check if sample location is enabled
3786 if (pPipe->graphicsPipelineCI.pMultisampleState) {
3787 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07003788 LvlFindInChain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
sfricke-samsung8f658d42020-05-03 20:12:24 -07003789 if (sample_location_state != nullptr) {
3790 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
3791 }
3792 }
locke-lunargd556cc32019-09-17 01:21:23 -06003793}
3794
// Resolve, for the currently bound pipeline, each sampler slot that is used together
// with an image, to the concrete descriptor from the currently bound descriptor sets.
// Results are written into the pipeline's active_slots sampler bookkeeping
// (sampler.second), which PreCallRecordCmdBindPipeline clears beforehand.
void UpdateSamplerDescriptorsUsedByImage(LAST_BOUND_STATE &last_bound_state) {
    if (!last_bound_state.pipeline_state) return;
    if (last_bound_state.per_set.empty()) return;

    for (auto &slot : last_bound_state.pipeline_state->active_slots) {
        for (auto &req : slot.second) {
            for (auto &samplers : req.second.samplers_used_by_image) {
                for (auto &sampler : samplers) {
                    // sampler.first.sampler_slot is (set index, binding index); only resolve
                    // when that set index is currently bound with a live descriptor set.
                    if (sampler.first.sampler_slot.first < last_bound_state.per_set.size() &&
                        last_bound_state.per_set[sampler.first.sampler_slot.first].bound_descriptor_set) {
                        // Cache the descriptor pointer for draw-time image/sampler validation.
                        sampler.second = last_bound_state.per_set[sampler.first.sampler_slot.first]
                                             .bound_descriptor_set->GetDescriptorFromBinding(sampler.first.sampler_slot.second,
                                                                                             sampler.first.sampler_index);
                    }
                }
            }
        }
    }
}
3814
// Record state for vkCmdBindPipeline: update the command buffer's static/dynamic
// status masks, viewport/scissor trashing state (for VK_NV_inherited_viewport_scissor),
// and the last-bound pipeline for the translated bind point.
void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
        bool rasterization_enabled = VK_FALSE == pipe_state->graphicsPipelineCI.ptr()->pRasterizationState->rasterizerDiscardEnable;
        const auto* viewport_state = pipe_state->graphicsPipelineCI.ptr()->pViewportState;
        const auto* dynamic_state = pipe_state->graphicsPipelineCI.ptr()->pDynamicState;
        // Drop "set" bits for state that was static in the previous pipeline, then mark
        // this pipeline's static state as set (static state is implicitly set at bind time).
        cb_state->status &= ~cb_state->static_status;
        cb_state->static_status = MakeStaticStateMask(dynamic_state);
        cb_state->status |= cb_state->static_status;
        cb_state->dynamic_status = CBSTATUS_ALL_STATE_SET & (~cb_state->static_status);

        // Used to calculate CMD_BUFFER_STATE::usedViewportScissorCount upon draw command with this graphics pipeline.
        // If rasterization disabled (no viewport/scissors used), or the actual number of viewports/scissors is dynamic (unknown at
        // this time), then these are set to 0 to disable this checking.
        auto has_dynamic_viewport_count = cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET;
        auto has_dynamic_scissor_count = cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET;
        cb_state->pipelineStaticViewportCount =
            has_dynamic_viewport_count || !rasterization_enabled ? 0 : viewport_state->viewportCount;
        cb_state->pipelineStaticScissorCount =
            has_dynamic_scissor_count || !rasterization_enabled ? 0 : viewport_state->scissorCount;

        // Trash dynamic viewport/scissor state if pipeline defines static state and enabled rasterization.
        // akeley98 NOTE: There's a bit of an ambiguity in the spec, whether binding such a pipeline overwrites
        // the entire viewport (scissor) array, or only the subsection defined by the viewport (scissor) count.
        // I am taking the latter interpretation based on the implementation details of NVIDIA's Vulkan driver.
        if (!has_dynamic_viewport_count) {
            cb_state->trashedViewportCount = true;
            if (rasterization_enabled && (cb_state->static_status & CBSTATUS_VIEWPORT_SET)) {
                cb_state->trashedViewportMask |= (uint32_t(1) << viewport_state->viewportCount) - 1u;
                // should become = ~uint32_t(0) if the other interpretation is correct.
            }
        }
        if (!has_dynamic_scissor_count) {
            cb_state->trashedScissorCount = true;
            if (rasterization_enabled && (cb_state->static_status & CBSTATUS_SCISSOR_SET)) {
                cb_state->trashedScissorMask |= (uint32_t(1) << viewport_state->scissorCount) - 1u;
                // should become = ~uint32_t(0) if the other interpretation is correct.
            }
        }
    }
    // Track the bound pipeline for this bind point and tie its lifetime to this command buffer.
    const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
    cb_state->lastBound[lv_bind_point].pipeline_state = pipe_state;
    SetPipelineState(pipe_state);
    AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);

    // Invalidate cached sampler-descriptor pointers before re-resolving them against
    // the currently bound descriptor sets.
    for (auto &slot : pipe_state->active_slots) {
        for (auto &req : slot.second) {
            for (auto &sampler : req.second.samplers_used_by_image) {
                for (auto &des : sampler) {
                    des.second = nullptr;
                }
            }
        }
    }
    UpdateSamplerDescriptorsUsedByImage(cb_state->lastBound[lv_bind_point]);
}
3875
3876void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3877 uint32_t viewportCount, const VkViewport *pViewports) {
3878 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003879 uint32_t bits = ((1u << viewportCount) - 1u) << firstViewport;
3880 cb_state->viewportMask |= bits;
3881 cb_state->trashedViewportMask &= ~bits;
locke-lunargd556cc32019-09-17 01:21:23 -06003882 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003883 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003884
3885 cb_state->dynamicViewports.resize(std::max(size_t(firstViewport + viewportCount), cb_state->dynamicViewports.size()));
3886 for (size_t i = 0; i < viewportCount; ++i) {
3887 cb_state->dynamicViewports[firstViewport + i] = pViewports[i];
3888 }
locke-lunargd556cc32019-09-17 01:21:23 -06003889}
3890
3891void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3892 uint32_t exclusiveScissorCount,
3893 const VkRect2D *pExclusiveScissors) {
3894 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3895 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3896 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3897 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003898 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003899}
3900
3901void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3902 VkImageLayout imageLayout) {
3903 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3904
3905 if (imageView != VK_NULL_HANDLE) {
3906 auto view_state = GetImageViewState(imageView);
3907 AddCommandBufferBindingImageView(cb_state, view_state);
3908 }
3909}
3910
3911void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3912 uint32_t viewportCount,
3913 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3914 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3915 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3916 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3917 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003918 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003919}
3920
3921void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3922 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3923 const VkAllocationCallbacks *pAllocator,
3924 VkAccelerationStructureNV *pAccelerationStructure,
3925 VkResult result) {
3926 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003927 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003928
3929 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003930 auto as_memory_requirements_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003931 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
3932 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3933 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3934
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003935 auto scratch_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003936 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
3937 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3938 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3939 &as_state->build_scratch_memory_requirements);
3940
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003941 auto update_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003942 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
3943 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3944 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3945 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003946 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003947 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3948}
3949
Jeff Bolz95176d02020-04-01 00:36:16 -05003950void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3951 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3952 const VkAllocationCallbacks *pAllocator,
3953 VkAccelerationStructureKHR *pAccelerationStructure,
3954 VkResult result) {
3955 if (VK_SUCCESS != result) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003956 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE_KHR>(*pAccelerationStructure, pCreateInfo);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003957 as_state->allocator = pAllocator;
sourav parmarcd5fb182020-07-17 12:58:44 -07003958 accelerationStructureMap_khr[*pAccelerationStructure] = std::move(as_state);
Jeff Bolz95176d02020-04-01 00:36:16 -05003959}
3960
sourav parmarcd5fb182020-07-17 12:58:44 -07003961void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresKHR(
3962 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3963 const VkAccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos) {
3964 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3965 if (cb_state == nullptr) {
3966 return;
3967 }
3968 for (uint32_t i = 0; i < infoCount; ++i) {
3969 ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
3970 ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3971 if (dst_as_state != nullptr) {
3972 dst_as_state->built = true;
3973 dst_as_state->build_info_khr.initialize(&pInfos[i]);
3974 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3975 }
3976 if (src_as_state != nullptr) {
3977 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3978 }
3979 }
3980 cb_state->hasBuildAccelerationStructureCmd = true;
3981}
3982
3983void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresIndirectKHR(
3984 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3985 const VkDeviceAddress *pIndirectDeviceAddresses, const uint32_t *pIndirectStrides,
3986 const uint32_t *const *ppMaxPrimitiveCounts) {
3987 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3988 if (cb_state == nullptr) {
3989 return;
3990 }
3991 for (uint32_t i = 0; i < infoCount; ++i) {
3992 ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
3993 ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3994 if (dst_as_state != nullptr) {
3995 dst_as_state->built = true;
3996 dst_as_state->build_info_khr.initialize(&pInfos[i]);
3997 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3998 }
3999 if (src_as_state != nullptr) {
4000 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
4001 }
4002 }
4003 cb_state->hasBuildAccelerationStructureCmd = true;
4004}
locke-lunargd556cc32019-09-17 01:21:23 -06004005void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004006 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2 *pMemoryRequirements) {
sourav parmarcd5fb182020-07-17 12:58:44 -07004007 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(pInfo->accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06004008 if (as_state != nullptr) {
4009 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
4010 as_state->memory_requirements = *pMemoryRequirements;
4011 as_state->memory_requirements_checked = true;
4012 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
4013 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
4014 as_state->build_scratch_memory_requirements_checked = true;
4015 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
4016 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
4017 as_state->update_scratch_memory_requirements_checked = true;
4018 }
4019 }
4020}
4021
sourav parmarcd5fb182020-07-17 12:58:44 -07004022void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
4023 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004024 if (VK_SUCCESS != result) return;
4025 for (uint32_t i = 0; i < bindInfoCount; i++) {
sourav parmarcd5fb182020-07-17 12:58:44 -07004026 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06004027
sourav parmarcd5fb182020-07-17 12:58:44 -07004028 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(info.accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06004029 if (as_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06004030 // Track objects tied to memory
4031 SetMemBinding(info.memory, as_state, info.memoryOffset,
sourav parmarcd5fb182020-07-17 12:58:44 -07004032 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));
locke-lunargd556cc32019-09-17 01:21:23 -06004033
4034 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05004035 // XXX TODO: Query device address for KHR extension
sourav parmarcd5fb182020-07-17 12:58:44 -07004036 if (enabled[gpu_validation]) {
locke-lunargd556cc32019-09-17 01:21:23 -06004037 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
4038 }
4039 }
4040 }
4041}
4042
4043void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
4044 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
4045 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
4046 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4047 if (cb_state == nullptr) {
4048 return;
4049 }
4050
sourav parmarcd5fb182020-07-17 12:58:44 -07004051 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
4052 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
locke-lunargd556cc32019-09-17 01:21:23 -06004053 if (dst_as_state != nullptr) {
4054 dst_as_state->built = true;
4055 dst_as_state->build_info.initialize(pInfo);
4056 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
4057 }
4058 if (src_as_state != nullptr) {
4059 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
4060 }
4061 cb_state->hasBuildAccelerationStructureCmd = true;
4062}
4063
4064void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
4065 VkAccelerationStructureNV dst,
4066 VkAccelerationStructureNV src,
4067 VkCopyAccelerationStructureModeNV mode) {
4068 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4069 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07004070 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
4071 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06004072 if (dst_as_state != nullptr && src_as_state != nullptr) {
4073 dst_as_state->built = true;
4074 dst_as_state->build_info = src_as_state->build_info;
4075 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
4076 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
4077 }
4078 }
4079}
4080
Jeff Bolz95176d02020-04-01 00:36:16 -05004081void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
4082 VkAccelerationStructureKHR accelerationStructure,
4083 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06004084 if (!accelerationStructure) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07004085 auto *as_state = GetAccelerationStructureStateKHR(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06004086 if (as_state) {
Jeff Bolz95176d02020-04-01 00:36:16 -05004087 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06004088 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
locke-lunargd556cc32019-09-17 01:21:23 -06004089 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004090 as_state->destroyed = true;
sourav parmarcd5fb182020-07-17 12:58:44 -07004091 accelerationStructureMap_khr.erase(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06004092 }
4093}
4094
Jeff Bolz95176d02020-04-01 00:36:16 -05004095void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
4096 VkAccelerationStructureNV accelerationStructure,
4097 const VkAllocationCallbacks *pAllocator) {
sourav parmarcd5fb182020-07-17 12:58:44 -07004098 if (!accelerationStructure) return;
4099 auto *as_state = GetAccelerationStructureStateNV(accelerationStructure);
4100 if (as_state) {
4101 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
4102 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
sourav parmarcd5fb182020-07-17 12:58:44 -07004103 ClearMemoryObjectBindings(obj_struct);
4104 as_state->destroyed = true;
4105 accelerationStructureMap.erase(accelerationStructure);
4106 }
Jeff Bolz95176d02020-04-01 00:36:16 -05004107}
4108
Chris Mayer9ded5eb2019-09-19 16:33:26 +02004109void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
4110 uint32_t viewportCount,
4111 const VkViewportWScalingNV *pViewportWScalings) {
4112 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4113 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004114 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02004115}
4116
locke-lunargd556cc32019-09-17 01:21:23 -06004117void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
4118 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4119 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004120 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004121}
4122
4123void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
4124 uint16_t lineStipplePattern) {
4125 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4126 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004127 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004128}
4129
4130void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
4131 float depthBiasClamp, float depthBiasSlopeFactor) {
4132 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4133 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004134 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004135}
4136
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004137void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
4138 const VkRect2D *pScissors) {
4139 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07004140 uint32_t bits = ((1u << scissorCount) - 1u) << firstScissor;
4141 cb_state->scissorMask |= bits;
4142 cb_state->trashedScissorMask &= ~bits;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004143 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004144 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004145}
4146
locke-lunargd556cc32019-09-17 01:21:23 -06004147void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
4148 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4149 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004150 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004151}
4152
4153void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
4154 float maxDepthBounds) {
4155 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4156 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004157 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004158}
4159
4160void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
4161 uint32_t compareMask) {
4162 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4163 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004164 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004165}
4166
4167void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
4168 uint32_t writeMask) {
4169 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4170 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004171 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004172}
4173
4174void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
4175 uint32_t reference) {
4176 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4177 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06004178 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06004179}
4180
// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
// Sets bound with a layout that is not compatible with the new one are invalidated
// (cleared) so later validation flags them as needing a re-bind.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    const auto lv_bind_point = ConvertToLvlBindPoint(pipeline_bind_point);
    auto &last_bound = cb_state->lastBound[lv_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else.
    // If ds is the currently bound push descriptor set, release it and return true.
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                // At most one push descriptor set can be bound, so stop at the first hit.
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                // Consume this set's slice of the caller-supplied dynamic offsets.
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
4275
4276// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
4277void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
4278 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
4279 uint32_t firstSet, uint32_t setCount,
4280 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
4281 const uint32_t *pDynamicOffsets) {
4282 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4283 auto pipeline_layout = GetPipelineLayout(layout);
4284
4285 // Resize binding arrays
4286 uint32_t last_set_index = firstSet + setCount - 1;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06004287 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
4288 if (last_set_index >= cb_state->lastBound[lv_bind_point].per_set.size()) {
4289 cb_state->lastBound[lv_bind_point].per_set.resize(last_set_index + 1);
locke-lunargd556cc32019-09-17 01:21:23 -06004290 }
4291
4292 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
4293 dynamicOffsetCount, pDynamicOffsets);
locke-lunargb8d7a7a2020-10-25 16:01:52 -06004294 cb_state->lastBound[lv_bind_point].pipeline_layout = layout;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06004295 UpdateSamplerDescriptorsUsedByImage(cb_state->lastBound[lv_bind_point]);
locke-lunargd556cc32019-09-17 01:21:23 -06004296}
4297
// State tracking shared by vkCmdPushDescriptorSetKHR (and its template variant callers):
// (re)creates the command buffer's internal push descriptor set if needed, binds it at
// the given set index, then applies the descriptor writes to it.
void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor()) {
        return;
    }

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto& dsl = pipeline_layout->set_layouts[set];
    const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
    auto &last_bound = cb_state->lastBound[lv_bind_point];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        // Ownership of the new DescriptorSet is taken by last_bound.push_descriptor_set.
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}
4325
// Thin dispatch wrapper: resolves the command buffer state and forwards to the shared
// push-descriptor state recording helper.
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
                                                                  VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                  uint32_t set, uint32_t descriptorWriteCount,
                                                                  const VkWriteDescriptorSet *pDescriptorWrites) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
}
4333
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06004334void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
4335 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
4336 const void *pValues) {
4337 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4338 if (cb_state != nullptr) {
4339 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
4340
4341 auto &push_constant_data = cb_state->push_constant_data;
4342 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
4343 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06004344 cb_state->push_constant_pipeline_layout_set = layout;
4345
4346 auto flags = stageFlags;
4347 uint32_t bit_shift = 0;
4348 while (flags) {
4349 if (flags & 1) {
4350 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
4351 const auto it = cb_state->push_constant_data_update.find(flag);
4352
4353 if (it != cb_state->push_constant_data_update.end()) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06004354 std::memset(it->second.data() + offset, PC_Byte_Updated, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06004355 }
4356 }
4357 flags = flags >> 1;
4358 ++bit_shift;
4359 }
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06004360 }
4361}
4362
// Track the index buffer binding (buffer, offset, index type) and mark index-buffer
// state as bound/dynamic for this command buffer.
void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                             VkIndexType indexType) {
    auto cb_state = GetCBState(commandBuffer);

    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
    // NOTE(review): assumes 'buffer' is a valid handle — buffer_state is dereferenced
    // unconditionally on the next line; valid per the vkCmdBindIndexBuffer API contract.
    cb_state->index_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(buffer);
    cb_state->index_buffer_binding.size = cb_state->index_buffer_binding.buffer_state->createInfo.size;
    cb_state->index_buffer_binding.offset = offset;
    cb_state->index_buffer_binding.index_type = indexType;
    // Add binding for this index buffer to this commandbuffer
    AddCommandBufferBindingBuffer(cb_state, cb_state->index_buffer_binding.buffer_state.get());
}
4376
4377void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
4378 uint32_t bindingCount, const VkBuffer *pBuffers,
4379 const VkDeviceSize *pOffsets) {
4380 auto cb_state = GetCBState(commandBuffer);
4381
4382 uint32_t end = firstBinding + bindingCount;
4383 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
4384 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
4385 }
4386
4387 for (uint32_t i = 0; i < bindingCount; ++i) {
4388 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07004389 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06004390 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06004391 vertex_buffer_binding.size = VK_WHOLE_SIZE;
4392 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06004393 // Add binding for this vertex buffer to this commandbuffer
Jeff Bolz165818a2020-05-08 11:19:03 -05004394 if (pBuffers[i]) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07004395 AddCommandBufferBindingBuffer(cb_state, vertex_buffer_binding.buffer_state.get());
Jeff Bolz165818a2020-05-08 11:19:03 -05004396 }
locke-lunargd556cc32019-09-17 01:21:23 -06004397 }
4398}
4399
4400void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
4401 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
4402 auto cb_state = GetCBState(commandBuffer);
4403 auto dst_buffer_state = GetBufferState(dstBuffer);
4404
4405 // Update bindings between buffer and cmd buffer
4406 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
4407}
4408
// Deferred eventUpdates callback helper: record the stage mask an event was signaled with
// into the per-submit map. Always returns false (callbacks return "validation failed").
bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags2KHR stageMask,
                                               EventToStageMap *localEventToStageMap) {
    (*localEventToStageMap)[event] = stageMask;
    return false;
}
4414
// Shared state recording for vkCmdSetEvent / vkCmdSetEvent2KHR.
void ValidationStateTracker::RecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        // Event is signaled before any wait on it in this command buffer; remembered
        // for submit-time set/wait ordering checks.
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    // Defer recording the stage mask until queue submit, when the per-submit map exists.
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}
4430
// vkCmdSetEvent: the legacy stage mask widens implicitly to VkPipelineStageFlags2KHR.
void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    RecordCmdSetEvent(commandBuffer, event, stageMask);
}
4435
// vkCmdSetEvent2KHR: collapse the dependency info's barriers into combined global stage
// masks and record the source mask, matching the legacy vkCmdSetEvent path.
void ValidationStateTracker::PreCallRecordCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
                                                          const VkDependencyInfoKHR *pDependencyInfo) {
    auto stage_masks = sync_utils::GetGlobalStageMasks(*pDependencyInfo);

    RecordCmdSetEvent(commandBuffer, event, stage_masks.src);
}
4442
// Shared state recording for vkCmdResetEvent / vkCmdResetEvent2KHR.
// Mirrors RecordCmdSetEvent, but the deferred update clears the stage mask to 0.
void ValidationStateTracker::RecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                 VkPipelineStageFlags2KHR stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }

    // Reset is recorded as "signaled with no stages" in the per-submit map.
    cb_state->eventUpdates.emplace_back(
        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, VkPipelineStageFlags2KHR(0), localEventToStageMap);
        });
}
4460
// vkCmdResetEvent: the legacy stage mask widens implicitly to VkPipelineStageFlags2KHR.
void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    RecordCmdResetEvent(commandBuffer, event, stageMask);
}
4465
// vkCmdResetEvent2KHR: same state tracking as the legacy reset path.
void ValidationStateTracker::PreCallRecordCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
                                                            VkPipelineStageFlags2KHR stageMask) {
    RecordCmdResetEvent(commandBuffer, event, stageMask);
}
4470
4471void ValidationStateTracker::RecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents) {
locke-lunargd556cc32019-09-17 01:21:23 -06004472 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4473 for (uint32_t i = 0; i < eventCount; ++i) {
4474 auto event_state = GetEventState(pEvents[i]);
4475 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05004476 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
4477 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004478 }
4479 cb_state->waitedEvents.insert(pEvents[i]);
4480 cb_state->events.push_back(pEvents[i]);
4481 }
4482}
4483
// vkCmdWaitEvents: only the event list matters for state tracking; the masks and
// barriers are handled elsewhere.
void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                        uint32_t bufferMemoryBarrierCount,
                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                        uint32_t imageMemoryBarrierCount,
                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
}
4493
// vkCmdWaitEvents2KHR: dependency infos are ignored here; only the event list is tracked.
void ValidationStateTracker::PreCallRecordCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount,
                                                            const VkEvent *pEvents, const VkDependencyInfoKHR *pDependencyInfos) {
    RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
}
4498
// Deferred queryUpdates callback helper: store the state of a single query.
// Always returns false (callbacks return "validation failed").
bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
    (*localQueryToStateMap)[object] = value;
    return false;
}
4503
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004504bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
4505 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05004506 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004507 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05004508 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06004509 }
4510 return false;
4511}
4512
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004513QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
4514 uint32_t perfPass) const {
4515 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06004516
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004517 auto iter = localQueryToStateMap->find(query);
4518 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05004519
Jeff Bolz310775c2019-10-09 00:46:33 -05004520 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06004521}
4522
// Shared begin-query recording: marks the query active/started and defers the RUNNING
// state write to queue submit time (the perf-query pass index is only known then).
void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled[query_validation]) return;
    cb_state->activeQueries.insert(query_obj);
    cb_state->startedQueries.insert(query_obj);
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
        return false;
    });
    // Bind the pool to this command buffer for lifetime/invalidation tracking.
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}
4537
// vkCmdBeginQuery: build the QueryObject and hand off to the shared begin-query recorder.
void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
                                                         VkFlags flags) {
    if (disabled[query_validation]) return;
    QueryObject query = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query);
}
4545
// Shared end-query recording: removes the query from the active set and defers the
// ENDED state write to queue submit time.
void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled[query_validation]) return;
    cb_state->activeQueries.erase(query_obj);
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}
4558
// vkCmdEndQuery: build the QueryObject and hand off to the shared end-query recorder.
void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
    if (disabled[query_validation]) return;
    QueryObject query_obj = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}
4565
// vkCmdResetQueryPool: remember which queries this command buffer resets and defer the
// RESET state write for the whole range to queue submit time.
void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled[query_validation]) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
        QueryObject query = {queryPool, slot};
        cb_state->resetQueries.insert(query);
    }

    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, VkQueryPool &firstPerfQueryPool,
                                                                            uint32_t perfQueryPass,
                                                                            QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}
4586
4587void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4588 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
4589 VkDeviceSize dstOffset, VkDeviceSize stride,
4590 VkQueryResultFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004591 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004592 auto cb_state = GetCBState(commandBuffer);
4593 auto dst_buff_state = GetBufferState(dstBuffer);
4594 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05004595 auto pool_state = GetQueryPoolState(queryPool);
4596 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004597 cb_state);
4598}
4599
// vkCmdWriteTimestamp: state tracking is identical to the synchronization2 variant, so
// forward directly (the single stage bit widens implicitly to VkPipelineStageFlags2KHR).
void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    PostCallRecordCmdWriteTimestamp2KHR(commandBuffer, pipelineStage, queryPool, slot);
}
4604
// vkCmdWriteTimestamp2KHR: bind the pool to this command buffer and defer marking the
// timestamp query ENDED until queue submit time (writing a timestamp completes the query).
void ValidationStateTracker::PostCallRecordCmdWriteTimestamp2KHR(VkCommandBuffer commandBuffer,
                                                                 VkPipelineStageFlags2KHR pipelineStage, VkQueryPool queryPool,
                                                                 uint32_t slot) {
    if (disabled[query_validation]) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
                                                VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
}
4620
// vkCmdWriteAccelerationStructuresPropertiesKHR: one query per acceleration structure is
// written starting at firstQuery; defer marking that whole range ENDED to submit time.
void ValidationStateTracker::PostCallRecordCmdWriteAccelerationStructuresPropertiesKHR(
    VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR *pAccelerationStructures,
    VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {
    if (disabled[query_validation]) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    cb_state->queryUpdates.emplace_back(
        [queryPool, firstQuery, accelerationStructureCount](const ValidationStateTracker *device_data, bool do_validate,
                                                            VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                            QueryMap *localQueryToStateMap) {
            return SetQueryStateMulti(queryPool, firstQuery, accelerationStructureCount, perfQueryPass, QUERYSTATE_ENDED,
                                      localQueryToStateMap);
        });
}
4637
// vkCreateFramebuffer: shadow the create info and cache shared pointers to the attachment
// image views (skipped for imageless framebuffers, whose views arrive at begin-render-pass).
void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) == 0) {
        fb_state->attachments_view_state.resize(pCreateInfo->attachmentCount);

        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            fb_state->attachments_view_state[i] = GetShared<IMAGE_VIEW_STATE>(pCreateInfo->pAttachments[i]);
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}
4654
// Build the render pass dependency graph (DAG) from the subpass dependency list:
//  - subpassToNode: prev/next adjacency between subpasses,
//  - self_dependencies: indices of dependencies where src == dst,
//  - subpass_dependencies: per-subpass dependency records, including barriers to/from
//    VK_SUBPASS_EXTERNAL and the set of "async" (unordered) earlier subpasses.
void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2 *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);
    auto &subpass_dependencies = render_pass->subpass_dependencies;
    subpass_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
        subpass_dependencies[i].pass = i;
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const auto &dependency = pCreateInfo->pDependencies[i];
        const auto src_subpass = dependency.srcSubpass;
        const auto dst_subpass = dependency.dstSubpass;
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
        if (src_subpass == VK_SUBPASS_EXTERNAL) {
            assert(dst_subpass != VK_SUBPASS_EXTERNAL);  // this is invalid per VUID-VkSubpassDependency-srcSubpass-00865
            subpass_dependencies[dst_subpass].barrier_from_external.emplace_back(&dependency);
        } else if (dst_subpass == VK_SUBPASS_EXTERNAL) {
            subpass_dependencies[src_subpass].barrier_to_external.emplace_back(&dependency);
        } else if (dependency.srcSubpass != dependency.dstSubpass) {
            // ignore self dependencies in prev and next
            subpass_dependencies[src_subpass].next[&subpass_dependencies[dst_subpass]].emplace_back(&dependency);
            subpass_dependencies[dst_subpass].prev[&subpass_dependencies[src_subpass]].emplace_back(&dependency);
        }
    }

    //
    // Determine "asynchronous" subpasses
    // Synchronization is only interested in asynchronous stages *earlier* than the current one... so we'll only look towards those.
    // NOTE: This is O(N^3), which we could shrink to O(N^2logN) using sets instead of arrays, but given that N is likely to be
    // small and the K for |= from the prev is much less than for set, we'll accept the brute force.
    std::vector<std::vector<bool>> pass_depends(pCreateInfo->subpassCount);
    for (uint32_t i = 1; i < pCreateInfo->subpassCount; ++i) {
        // depends[j] == true when subpass i transitively depends on subpass j (j < i).
        auto &depends = pass_depends[i];
        depends.resize(i);
        auto &subpass_dep = subpass_dependencies[i];
        for (const auto &prev : subpass_dep.prev) {
            const auto prev_pass = prev.first->pass;
            const auto &prev_depends = pass_depends[prev_pass];
            for (uint32_t j = 0; j < prev_pass; j++) {
                depends[j] = depends[j] | prev_depends[j];
            }
            depends[prev_pass] = true;
        }
        // Any earlier pass with no (transitive) dependency path to this one is "async".
        for (uint32_t pass = 0; pass < subpass_dep.pass; pass++) {
            if (!depends[pass]) {
                subpass_dep.async.push_back(pass);
            }
        }
    }
}
4718
John Zulauf4aff5d92020-02-21 08:29:35 -07004719static VkSubpassDependency2 ImplicitDependencyFromExternal(uint32_t subpass) {
4720 VkSubpassDependency2 from_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4721 nullptr,
4722 VK_SUBPASS_EXTERNAL,
4723 subpass,
4724 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
4725 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4726 0,
4727 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4728 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4729 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4730 0,
4731 0};
4732 return from_external;
4733}
4734
4735static VkSubpassDependency2 ImplicitDependencyToExternal(uint32_t subpass) {
4736 VkSubpassDependency2 to_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4737 nullptr,
4738 subpass,
4739 VK_SUBPASS_EXTERNAL,
4740 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4741 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
4742 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4743 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4744 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4745 0,
4746 0,
4747 0};
4748 return to_external;
4749}
4750
locke-lunargd556cc32019-09-17 01:21:23 -06004751void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
4752 std::shared_ptr<RENDER_PASS_STATE> &render_pass,
4753 VkRenderPass *pRenderPass) {
4754 render_pass->renderPass = *pRenderPass;
4755 auto create_info = render_pass->createInfo.ptr();
4756
4757 RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());
4758
John Zulauf8863c332020-03-20 10:34:33 -06004759 struct AttachmentTracker { // This is really only of local interest, but a bit big for a lambda
4760 RENDER_PASS_STATE *const rp;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004761 std::vector<uint32_t> &first;
John Zulauf1507ee42020-05-18 11:33:09 -06004762 std::vector<bool> &first_is_transition;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004763 std::vector<uint32_t> &last;
John Zulauf8863c332020-03-20 10:34:33 -06004764 std::vector<std::vector<RENDER_PASS_STATE::AttachmentTransition>> &subpass_transitions;
Jeremy Gebbencbf22862021-03-03 12:01:22 -07004765 layer_data::unordered_map<uint32_t, bool> &first_read;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004766 const uint32_t attachment_count;
John Zulauf8863c332020-03-20 10:34:33 -06004767 std::vector<VkImageLayout> attachment_layout;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004768 std::vector<std::vector<VkImageLayout>> subpass_attachment_layout;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004769 explicit AttachmentTracker(std::shared_ptr<RENDER_PASS_STATE> &render_pass)
John Zulauf8863c332020-03-20 10:34:33 -06004770 : rp(render_pass.get()),
4771 first(rp->attachment_first_subpass),
John Zulauf1507ee42020-05-18 11:33:09 -06004772 first_is_transition(rp->attachment_first_is_transition),
John Zulauf8863c332020-03-20 10:34:33 -06004773 last(rp->attachment_last_subpass),
4774 subpass_transitions(rp->subpass_transitions),
4775 first_read(rp->attachment_first_read),
4776 attachment_count(rp->createInfo.attachmentCount),
John Zulauf2bc1fde2020-04-24 15:09:51 -06004777 attachment_layout(),
4778 subpass_attachment_layout() {
John Zulaufbb9f07f2020-03-19 16:53:06 -06004779 first.resize(attachment_count, VK_SUBPASS_EXTERNAL);
John Zulauf1507ee42020-05-18 11:33:09 -06004780 first_is_transition.resize(attachment_count, false);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004781 last.resize(attachment_count, VK_SUBPASS_EXTERNAL);
John Zulauf8863c332020-03-20 10:34:33 -06004782 subpass_transitions.resize(rp->createInfo.subpassCount + 1); // Add an extra for EndRenderPass
4783 attachment_layout.reserve(attachment_count);
John Zulauf2bc1fde2020-04-24 15:09:51 -06004784 subpass_attachment_layout.resize(rp->createInfo.subpassCount);
4785 for (auto &subpass_layouts : subpass_attachment_layout) {
4786 subpass_layouts.resize(attachment_count, kInvalidLayout);
4787 }
4788
John Zulauf8863c332020-03-20 10:34:33 -06004789 for (uint32_t j = 0; j < attachment_count; j++) {
4790 attachment_layout.push_back(rp->createInfo.pAttachments[j].initialLayout);
4791 }
John Zulauf4aff5d92020-02-21 08:29:35 -07004792 }
John Zulauf4aff5d92020-02-21 08:29:35 -07004793
John Zulaufbb9f07f2020-03-19 16:53:06 -06004794 void Update(uint32_t subpass, const VkAttachmentReference2 *attach_ref, uint32_t count, bool is_read) {
4795 if (nullptr == attach_ref) return;
4796 for (uint32_t j = 0; j < count; ++j) {
4797 const auto attachment = attach_ref[j].attachment;
4798 if (attachment != VK_ATTACHMENT_UNUSED) {
John Zulauf8863c332020-03-20 10:34:33 -06004799 const auto layout = attach_ref[j].layout;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004800 // Take advantage of the fact that insert won't overwrite, so we'll only write the first time.
Jeremy Gebbencbf22862021-03-03 12:01:22 -07004801 first_read.emplace(attachment, is_read);
John Zulauf2bc1fde2020-04-24 15:09:51 -06004802 if (first[attachment] == VK_SUBPASS_EXTERNAL) {
4803 first[attachment] = subpass;
4804 const auto initial_layout = rp->createInfo.pAttachments[attachment].initialLayout;
John Zulauf1507ee42020-05-18 11:33:09 -06004805 if (initial_layout != layout) {
4806 subpass_transitions[subpass].emplace_back(VK_SUBPASS_EXTERNAL, attachment, initial_layout, layout);
4807 first_is_transition[attachment] = true;
4808 }
John Zulauf2bc1fde2020-04-24 15:09:51 -06004809 }
John Zulaufbb9f07f2020-03-19 16:53:06 -06004810 last[attachment] = subpass;
John Zulauf8863c332020-03-20 10:34:33 -06004811
John Zulauf2bc1fde2020-04-24 15:09:51 -06004812 for (const auto &prev : rp->subpass_dependencies[subpass].prev) {
John Zulaufbaea94f2020-09-15 17:55:16 -06004813 const auto prev_pass = prev.first->pass;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004814 const auto prev_layout = subpass_attachment_layout[prev_pass][attachment];
4815 if ((prev_layout != kInvalidLayout) && (prev_layout != layout)) {
4816 subpass_transitions[subpass].emplace_back(prev_pass, attachment, prev_layout, layout);
4817 }
John Zulauf8863c332020-03-20 10:34:33 -06004818 }
John Zulauf2bc1fde2020-04-24 15:09:51 -06004819 attachment_layout[attachment] = layout;
John Zulauf8863c332020-03-20 10:34:33 -06004820 }
4821 }
4822 }
4823 void FinalTransitions() {
4824 auto &final_transitions = subpass_transitions[rp->createInfo.subpassCount];
4825
4826 for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
4827 const auto final_layout = rp->createInfo.pAttachments[attachment].finalLayout;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004828 // Add final transitions for attachments that were used and change layout.
4829 if ((last[attachment] != VK_SUBPASS_EXTERNAL) && final_layout != attachment_layout[attachment]) {
4830 final_transitions.emplace_back(last[attachment], attachment, attachment_layout[attachment], final_layout);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004831 }
locke-lunargd556cc32019-09-17 01:21:23 -06004832 }
4833 }
John Zulaufbb9f07f2020-03-19 16:53:06 -06004834 };
John Zulauf8863c332020-03-20 10:34:33 -06004835 AttachmentTracker attachment_tracker(render_pass);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004836
4837 for (uint32_t subpass_index = 0; subpass_index < create_info->subpassCount; ++subpass_index) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004838 const VkSubpassDescription2 &subpass = create_info->pSubpasses[subpass_index];
John Zulauf8863c332020-03-20 10:34:33 -06004839 attachment_tracker.Update(subpass_index, subpass.pColorAttachments, subpass.colorAttachmentCount, false);
4840 attachment_tracker.Update(subpass_index, subpass.pResolveAttachments, subpass.colorAttachmentCount, false);
4841 attachment_tracker.Update(subpass_index, subpass.pDepthStencilAttachment, 1, false);
4842 attachment_tracker.Update(subpass_index, subpass.pInputAttachments, subpass.inputAttachmentCount, true);
John Zulauf4aff5d92020-02-21 08:29:35 -07004843 }
John Zulauf8863c332020-03-20 10:34:33 -06004844 attachment_tracker.FinalTransitions();
John Zulauf4aff5d92020-02-21 08:29:35 -07004845
John Zulaufbb9f07f2020-03-19 16:53:06 -06004846 // Add implicit dependencies
John Zulauf8863c332020-03-20 10:34:33 -06004847 for (uint32_t attachment = 0; attachment < attachment_tracker.attachment_count; attachment++) {
4848 const auto first_use = attachment_tracker.first[attachment];
John Zulauf4aff5d92020-02-21 08:29:35 -07004849 if (first_use != VK_SUBPASS_EXTERNAL) {
4850 auto &subpass_dep = render_pass->subpass_dependencies[first_use];
John Zulaufbaea94f2020-09-15 17:55:16 -06004851 if (subpass_dep.barrier_from_external.size() == 0) {
4852 // Add implicit from barrier if they're aren't any
John Zulauf4aff5d92020-02-21 08:29:35 -07004853 subpass_dep.implicit_barrier_from_external.reset(
4854 new VkSubpassDependency2(ImplicitDependencyFromExternal(first_use)));
John Zulaufbaea94f2020-09-15 17:55:16 -06004855 subpass_dep.barrier_from_external.emplace_back(subpass_dep.implicit_barrier_from_external.get());
John Zulauf4aff5d92020-02-21 08:29:35 -07004856 }
4857 }
4858
John Zulauf8863c332020-03-20 10:34:33 -06004859 const auto last_use = attachment_tracker.last[attachment];
John Zulauf4aff5d92020-02-21 08:29:35 -07004860 if (last_use != VK_SUBPASS_EXTERNAL) {
4861 auto &subpass_dep = render_pass->subpass_dependencies[last_use];
John Zulaufbaea94f2020-09-15 17:55:16 -06004862 if (render_pass->subpass_dependencies[last_use].barrier_to_external.size() == 0) {
4863 // Add implicit to barrier if they're aren't any
John Zulauf4aff5d92020-02-21 08:29:35 -07004864 subpass_dep.implicit_barrier_to_external.reset(new VkSubpassDependency2(ImplicitDependencyToExternal(last_use)));
John Zulaufbaea94f2020-09-15 17:55:16 -06004865 subpass_dep.barrier_to_external.emplace_back(subpass_dep.implicit_barrier_to_external.get());
John Zulauf4aff5d92020-02-21 08:29:35 -07004866 }
locke-lunargd556cc32019-09-17 01:21:23 -06004867 }
4868 }
4869
4870 // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
4871 renderPassMap[*pRenderPass] = std::move(render_pass);
4872}
4873
4874// Style note:
4875// Use of rvalue reference exceeds reccommended usage of rvalue refs in google style guide, but intentionally forces caller to move
4876// or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
4877// construction or assignment.
4878void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
4879 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4880 VkResult result) {
4881 if (VK_SUCCESS != result) return;
4882 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4883 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
4884}
4885
Mike Schuchardt2df08912020-12-15 16:28:09 -08004886void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07004887 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4888 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004889 if (VK_SUCCESS != result) return;
4890 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4891 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
4892}
4893
Mike Schuchardt2df08912020-12-15 16:28:09 -08004894void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07004895 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4896 VkResult result) {
4897 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
4898}
4899
Mike Schuchardt2df08912020-12-15 16:28:09 -08004900void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07004901 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4902 VkResult result) {
4903 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
4904}
4905
locke-lunargd556cc32019-09-17 01:21:23 -06004906void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
4907 const VkRenderPassBeginInfo *pRenderPassBegin,
4908 const VkSubpassContents contents) {
4909 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunargaecf2152020-05-12 17:15:41 -06004910 auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
4911 auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004912
4913 if (render_pass_state) {
locke-lunargaecf2152020-05-12 17:15:41 -06004914 cb_state->activeFramebuffer = framebuffer;
locke-lunargd556cc32019-09-17 01:21:23 -06004915 cb_state->activeRenderPass = render_pass_state;
Tony-LunarG61e7c0c2020-03-03 16:09:11 -07004916 cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
locke-lunargd556cc32019-09-17 01:21:23 -06004917 cb_state->activeSubpass = 0;
4918 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07004919
locke-lunargd556cc32019-09-17 01:21:23 -06004920 // Connect this RP to cmdBuffer
locke-lunargaecf2152020-05-12 17:15:41 -06004921 AddCommandBufferBinding(
4922 render_pass_state->cb_bindings,
4923 VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state.get()), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004924
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07004925 auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06004926 if (chained_device_group_struct) {
4927 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
4928 } else {
4929 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
4930 }
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03004931
locke-lunargfc78e932020-11-19 17:06:24 -07004932 cb_state->active_subpasses = nullptr;
4933 cb_state->active_attachments = nullptr;
4934
4935 if (framebuffer) {
4936 cb_state->framebuffers.insert(framebuffer);
4937
4938 // Set cb_state->active_subpasses
4939 cb_state->active_subpasses =
4940 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
4941 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
4942 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
4943
4944 // Set cb_state->active_attachments & cb_state->attachments_view_states
4945 cb_state->active_attachments =
4946 std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(framebuffer->createInfo.attachmentCount);
4947 UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, pRenderPassBegin);
4948
4949 // Connect this framebuffer and its children to this cmdBuffer
4950 AddFramebufferBinding(cb_state, framebuffer.get());
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03004951 }
locke-lunargd556cc32019-09-17 01:21:23 -06004952 }
4953}
4954
// vkCmdBeginRenderPass (Vulkan 1.0) — forwards to the shared begin-render-pass recording.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}
4960
// vkCmdBeginRenderPass2KHR — only the contents field of the begin-info is needed here.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfo *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}
4966
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06004967void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4968 uint32_t counterBufferCount,
4969 const VkBuffer *pCounterBuffers,
4970 const VkDeviceSize *pCounterBufferOffsets) {
4971 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4972
4973 cb_state->transform_feedback_active = true;
4974}
4975
4976void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4977 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
4978 const VkDeviceSize *pCounterBufferOffsets) {
4979 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4980
4981 cb_state->transform_feedback_active = false;
4982}
4983
Tony-LunarG977448c2019-12-02 14:52:02 -07004984void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
4985 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004986 const VkSubpassBeginInfo *pSubpassBeginInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004987 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4988}
4989
locke-lunargd556cc32019-09-17 01:21:23 -06004990void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4991 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4992 cb_state->activeSubpass++;
4993 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07004994
4995 // Update cb_state->active_subpasses
4996 if (cb_state->activeRenderPass && cb_state->activeFramebuffer) {
4997 cb_state->active_subpasses = nullptr;
4998 cb_state->active_subpasses =
4999 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
5000
5001 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
5002 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
5003 }
locke-lunargd556cc32019-09-17 01:21:23 -06005004}
5005
// vkCmdNextSubpass (Vulkan 1.0) — forwards to the shared next-subpass recording.
void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}
5009
// vkCmdNextSubpass2KHR — only the contents field of the begin-info is needed here.
void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfo *pSubpassBeginInfo,
                                                              const VkSubpassEndInfo *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}
5015
Tony-LunarG977448c2019-12-02 14:52:02 -07005016void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005017 const VkSubpassBeginInfo *pSubpassBeginInfo,
5018 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005019 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
5020}
5021
locke-lunargd556cc32019-09-17 01:21:23 -06005022void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
5023 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5024 cb_state->activeRenderPass = nullptr;
locke-lunargfc78e932020-11-19 17:06:24 -07005025 cb_state->active_attachments = nullptr;
5026 cb_state->active_subpasses = nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06005027 cb_state->activeSubpass = 0;
5028 cb_state->activeFramebuffer = VK_NULL_HANDLE;
5029}
5030
// vkCmdEndRenderPass (Vulkan 1.0) — forwards to the shared end-render-pass recording.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}
5034
// vkCmdEndRenderPass2KHR — the end-info carries no state this tracker records.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfo *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}
5039
Tony-LunarG977448c2019-12-02 14:52:02 -07005040void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005041 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005042 RecordCmdEndRenderPassState(commandBuffer);
5043}
locke-lunargd556cc32019-09-17 01:21:23 -06005044void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
5045 const VkCommandBuffer *pCommandBuffers) {
5046 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5047
5048 CMD_BUFFER_STATE *sub_cb_state = NULL;
5049 for (uint32_t i = 0; i < commandBuffersCount; i++) {
5050 sub_cb_state = GetCBState(pCommandBuffers[i]);
5051 assert(sub_cb_state);
5052 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
5053 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
5054 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
5055 // from the validation step to the recording step
5056 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
5057 }
5058 }
5059
5060 // Propagate inital layout and current layout state to the primary cmd buffer
5061 // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
5062 // ValidationStateTracker these maps will be empty, so leaving the propagation in the the state tracker should be a no-op
5063 // for those other classes.
5064 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
5065 const auto image = sub_layout_map_entry.first;
5066 const auto *image_state = GetImageState(image);
5067 if (!image_state) continue; // Can't set layouts of a dead image
5068
5069 auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
John Zulauf17708d02021-02-22 11:20:58 -07005070 const auto *sub_cb_subres_map = &sub_layout_map_entry.second;
locke-lunargd556cc32019-09-17 01:21:23 -06005071 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
5072 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
5073 }
5074
5075 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
5076 cb_state->linkedCommandBuffers.insert(sub_cb_state);
5077 sub_cb_state->linkedCommandBuffers.insert(cb_state);
5078 for (auto &function : sub_cb_state->queryUpdates) {
5079 cb_state->queryUpdates.push_back(function);
5080 }
5081 for (auto &function : sub_cb_state->queue_submit_functions) {
5082 cb_state->queue_submit_functions.push_back(function);
5083 }
David Zhao Akeley44139b12021-04-26 16:16:13 -07005084
5085 // State is trashed after executing secondary command buffers.
5086 // Importantly, this function runs after CoreChecks::PreCallValidateCmdExecuteCommands.
5087 cb_state->trashedViewportMask = ~uint32_t(0);
5088 cb_state->trashedScissorMask = ~uint32_t(0);
5089 cb_state->trashedViewportCount = true;
5090 cb_state->trashedScissorCount = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005091 }
5092}
5093
5094void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
5095 VkFlags flags, void **ppData, VkResult result) {
5096 if (VK_SUCCESS != result) return;
5097 RecordMappedMemory(mem, offset, size, ppData);
5098}
5099
5100void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
5101 auto mem_info = GetDevMemState(mem);
5102 if (mem_info) {
5103 mem_info->mapped_range = MemRange();
5104 mem_info->p_driver_data = nullptr;
5105 }
5106}
5107
// Record the memory binding for an image. Swapchain-backed bindings get a "fake" address
// shared by all images bound to the same swapchain index (they alias each other); normal
// bindings are inserted into the memory object's range map and bound-object set.
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // An Android special image cannot get VkSubresourceLayout until the image binds a memory.
        // See: VUID-vkGetImageSubresourceLayout-image-01895
        image_state->fragment_encoder =
            std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
        const auto swapchain_info = LvlFindInChain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            // Binding to a swapchain image rather than a VkDeviceMemory allocation.
            auto *swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                SWAPCHAIN_IMAGE &swap_image = swapchain->images[swapchain_info->imageIndex];
                if (swap_image.bound_images.empty()) {
                    // If this is the first "binding" of an image to this swapchain index, get a fake allocation
                    image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
                } else {
                    // Subsequent bindings reuse the first image's fake address so aliases compare equal.
                    image_state->swapchain_fake_address = (*swap_image.bound_images.cbegin())->swapchain_fake_address;
                }
                swap_image.bound_images.emplace(image_state);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;

                // All images bound to this swapchain and index are aliases
                AddAliasingImage(image_state, &swap_image.bound_images);
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(image_state, mem_info, bindInfo.memoryOffset);
                // Images created with the ALIAS flag may alias others bound to the same memory.
                if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
                    AddAliasingImage(image_state, &mem_info->bound_images);
                }
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
    }
}
5149
5150void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
5151 VkDeviceSize memoryOffset, VkResult result) {
5152 if (VK_SUCCESS != result) return;
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06005153 auto bind_info = LvlInitStruct<VkBindImageMemoryInfo>();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005154 bind_info.image = image;
5155 bind_info.memory = mem;
5156 bind_info.memoryOffset = memoryOffset;
5157 UpdateBindImageMemoryState(bind_info);
locke-lunargd556cc32019-09-17 01:21:23 -06005158}
5159
5160void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005161 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005162 if (VK_SUCCESS != result) return;
5163 for (uint32_t i = 0; i < bindInfoCount; i++) {
5164 UpdateBindImageMemoryState(pBindInfos[i]);
5165 }
5166}
5167
5168void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005169 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005170 if (VK_SUCCESS != result) return;
5171 for (uint32_t i = 0; i < bindInfoCount; i++) {
5172 UpdateBindImageMemoryState(pBindInfos[i]);
5173 }
5174}
5175
5176void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
5177 auto event_state = GetEventState(event);
5178 if (event_state) {
5179 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
5180 }
locke-lunargd556cc32019-09-17 01:21:23 -06005181}
5182
5183void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
5184 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
5185 VkResult result) {
5186 if (VK_SUCCESS != result) return;
5187 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
5188 pImportSemaphoreFdInfo->flags);
5189}
5190
5191void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005192 VkExternalSemaphoreHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06005193 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
Mike Schuchardt2df08912020-12-15 16:28:09 -08005194 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06005195 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
5196 semaphore_state->scope = kSyncScopeExternalPermanent;
5197 }
5198}
5199
5200#ifdef VK_USE_PLATFORM_WIN32_KHR
// Record a successful Win32-handle semaphore import via the shared import path.
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}
5207
// Record a successful Win32-handle semaphore export via the shared export path.
void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}
5214
// Record a successful Win32-handle fence import via the shared import path.
void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}
5221
// Record a successful Win32-handle fence export via the shared export path.
void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
5228#endif
5229
5230void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
5231 VkResult result) {
5232 if (VK_SUCCESS != result) return;
5233 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
5234}
5235
Mike Schuchardt2df08912020-12-15 16:28:09 -08005236void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type,
5237 VkFenceImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06005238 FENCE_STATE *fence_node = GetFenceState(fence);
5239 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08005240 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_FENCE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06005241 fence_node->scope == kSyncScopeInternal) {
5242 fence_node->scope = kSyncScopeExternalTemporary;
5243 } else {
5244 fence_node->scope = kSyncScopeExternalPermanent;
5245 }
5246 }
5247}
5248
5249void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
5250 VkResult result) {
5251 if (VK_SUCCESS != result) return;
5252 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
5253}
5254
Mike Schuchardt2df08912020-12-15 16:28:09 -08005255void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06005256 FENCE_STATE *fence_state = GetFenceState(fence);
5257 if (fence_state) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08005258 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06005259 // Export with reference transference becomes external
5260 fence_state->scope = kSyncScopeExternalPermanent;
5261 } else if (fence_state->scope == kSyncScopeInternal) {
5262 // Export with copy transference has a side effect of resetting the fence
5263 fence_state->state = FENCE_UNSIGNALED;
5264 }
5265 }
5266}
5267
5268void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
5269 VkResult result) {
5270 if (VK_SUCCESS != result) return;
5271 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
5272}
5273
5274void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
5275 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
5276 if (VK_SUCCESS != result) return;
John Zulaufd5115702021-01-18 12:34:33 -07005277 const auto event = *pEvent;
Jeremy Gebbencbf22862021-03-03 12:01:22 -07005278 eventMap.emplace(event, std::make_shared<EVENT_STATE>(event, pCreateInfo->flags));
locke-lunargd556cc32019-09-17 01:21:23 -06005279}
5280
5281void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
5282 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
5283 SWAPCHAIN_NODE *old_swapchain_state) {
5284 if (VK_SUCCESS == result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005285 auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06005286 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
5287 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
5288 swapchain_state->shared_presentable = true;
5289 }
5290 surface_state->swapchain = swapchain_state.get();
5291 swapchainMap[*pSwapchain] = std::move(swapchain_state);
5292 } else {
5293 surface_state->swapchain = nullptr;
5294 }
5295 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
5296 if (old_swapchain_state) {
5297 old_swapchain_state->retired = true;
5298 }
5299 return;
5300}
5301
5302void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
5303 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
5304 VkResult result) {
5305 auto surface_state = GetSurfaceState(pCreateInfo->surface);
5306 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
5307 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
5308}
5309
// Tear down tracking state for a swapchain and all of its presentable images.
// Ordering matters: aliasing/binding info must be cleared before the image state is erased.
void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        // Destroy the state of every image the swapchain handed out.
        for (auto &swapchain_image : swapchain_data->images) {
            // TODO: missing validation that the bound images are empty (except for image_state above)
            // Clean up the aliases and the bound_images *before* erasing the image_state.
            RemoveAliasingImages(swapchain_image.bound_images);
            swapchain_image.bound_images.clear();

            if (swapchain_image.image_state) {
                ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image_state->image, kVulkanObjectTypeImage));
                imageMap.erase(swapchain_image.image_state->image);
                // Null the pointer so nothing else can reach the erased state.
                swapchain_image.image_state = nullptr;
            }
        }

        // Detach the swapchain from its surface (only if the surface still points at us).
        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        // Mark destroyed before erasing: outstanding shared_ptr holders can still see the flag.
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}
5336
sfricke-samsung5c1b7392020-12-13 22:17:15 -08005337void ValidationStateTracker::PostCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
5338 const VkDisplayModeCreateInfoKHR *pCreateInfo,
5339 const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode,
5340 VkResult result) {
5341 if (VK_SUCCESS != result) return;
5342 if (!pMode) return;
5343 auto display_mode_state = std::make_shared<DISPLAY_MODE_STATE>(*pMode);
5344 display_mode_state->physical_device = physicalDevice;
5345 display_mode_map[*pMode] = std::move(display_mode_state);
5346}
5347
// Update semaphore and swapchain-image tracking state after a present request reached the ICD.
void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto semaphore_state = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (semaphore_state) {
            // The wait consumes the pending signal: clear both the signaler and the signaled flag.
            semaphore_state->signaler.first = VK_NULL_HANDLE;
            semaphore_state->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
        // confused itself just as much.
        // Per-swapchain results (when provided) take precedence over the aggregate result.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            IMAGE_STATE *image_state = swapchain_data->images[pPresentInfo->pImageIndices[i]].image_state;
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    // Once a shared-presentable image has been presented, its layout tracking is pinned.
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
    // its semaphore waits) /never/ participate in any completion proof.
}
5378
5379void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
5380 const VkSwapchainCreateInfoKHR *pCreateInfos,
5381 const VkAllocationCallbacks *pAllocator,
5382 VkSwapchainKHR *pSwapchains, VkResult result) {
5383 if (pCreateInfos) {
5384 for (uint32_t i = 0; i < swapchainCount; i++) {
5385 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
5386 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
5387 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
5388 }
5389 }
5390}
5391
// Common bookkeeping for vkAcquireNextImageKHR / vkAcquireNextImage2KHR:
// put the fence in flight, mark the semaphore signaled, and mark the image acquired.
void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto fence_state = GetFenceState(fence);
    if (fence_state && fence_state->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        fence_state->state = FENCE_INFLIGHT;
        fence_state->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && semaphore_state->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        semaphore_state->signaled = true;
        semaphore_state->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        IMAGE_STATE *image_state = swapchain_data->images[*pImageIndex].image_state;
        if (image_state) {
            image_state->acquired = true;
            // Propagate the swapchain's shared-presentable property onto the image.
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}
5420
5421void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
5422 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
5423 VkResult result) {
5424 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
5425 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
5426}
5427
5428void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
5429 uint32_t *pImageIndex, VkResult result) {
5430 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
5431 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
5432 pAcquireInfo->fence, pImageIndex);
5433}
5434
5435void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
5436 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
5437 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
5438 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
5439 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
5440 phys_device_state.phys_device = pPhysicalDevices[i];
5441 // Init actual features for each physical device
5442 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
5443 }
5444 }
5445}
5446
5447// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
5448static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005449 VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06005450 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
5451
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005452 if (pQueueFamilyProperties) { // Save queue family properties
locke-lunargd556cc32019-09-17 01:21:23 -06005453 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
5454 for (uint32_t i = 0; i < count; ++i) {
5455 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
5456 }
5457 }
5458}
5459
5460void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
5461 uint32_t *pQueueFamilyPropertyCount,
5462 VkQueueFamilyProperties *pQueueFamilyProperties) {
5463 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5464 assert(physical_device_state);
Mike Schuchardt2df08912020-12-15 16:28:09 -08005465 VkQueueFamilyProperties2 *pqfp = nullptr;
5466 std::vector<VkQueueFamilyProperties2> qfp;
locke-lunargd556cc32019-09-17 01:21:23 -06005467 qfp.resize(*pQueueFamilyPropertyCount);
5468 if (pQueueFamilyProperties) {
5469 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06005470 qfp[i] = LvlInitStruct<VkQueueFamilyProperties2>();
locke-lunargd556cc32019-09-17 01:21:23 -06005471 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
5472 }
5473 pqfp = qfp.data();
5474 }
5475 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
5476}
5477
5478void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005479 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06005480 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5481 assert(physical_device_state);
5482 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
5483 pQueueFamilyProperties);
5484}
5485
5486void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005487 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06005488 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5489 assert(physical_device_state);
5490 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
5491 pQueueFamilyProperties);
5492}
5493void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
5494 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005495 if (!surface) return;
5496 auto surface_state = GetSurfaceState(surface);
5497 surface_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005498 surface_map.erase(surface);
5499}
5500
5501void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005502 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06005503}
5504
5505void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
5506 const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
5507 const VkAllocationCallbacks *pAllocator,
5508 VkSurfaceKHR *pSurface, VkResult result) {
5509 if (VK_SUCCESS != result) return;
5510 RecordVulkanSurface(pSurface);
5511}
5512
#ifdef VK_USE_PLATFORM_ANDROID_KHR
// An Android surface is tracked like any other surface handle.
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (result != VK_SUCCESS) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR
5522
#ifdef VK_USE_PLATFORM_IOS_MVK
// An iOS surface is tracked like any other surface handle.
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (result != VK_SUCCESS) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK
5531
#ifdef VK_USE_PLATFORM_MACOS_MVK
// A macOS surface is tracked like any other surface handle.
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (result != VK_SUCCESS) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK
5541
#ifdef VK_USE_PLATFORM_METAL_EXT
// A Metal surface is tracked like any other surface handle.
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (result != VK_SUCCESS) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_METAL_EXT
5551
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
// A Wayland surface is tracked like any other surface handle.
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (result != VK_SUCCESS) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR
5561
#ifdef VK_USE_PLATFORM_WIN32_KHR
// A Win32 surface is tracked like any other surface handle.
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (result != VK_SUCCESS) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR
5571
#ifdef VK_USE_PLATFORM_XCB_KHR
// An XCB surface is tracked like any other surface handle.
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (result != VK_SUCCESS) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR
5580
#ifdef VK_USE_PLATFORM_XLIB_KHR
// An Xlib surface is tracked like any other surface handle.
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (result != VK_SUCCESS) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR
5589
Niklas Haas8b84af12020-04-19 22:20:11 +02005590void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
5591 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
5592 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5593 VkResult result) {
5594 if (VK_SUCCESS != result) return;
5595 RecordVulkanSurface(pSurface);
5596}
5597
Cort23cf2282019-09-20 18:58:18 +02005598void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005599 VkPhysicalDeviceFeatures *pFeatures) {
5600 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Yilong Li358152a2020-07-08 02:16:45 -07005601 // Reset the features2 safe struct before setting up the features field.
5602 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02005603 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02005604}
5605
5606void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005607 VkPhysicalDeviceFeatures2 *pFeatures) {
5608 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02005609 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005610}
5611
5612void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005613 VkPhysicalDeviceFeatures2 *pFeatures) {
5614 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02005615 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005616}
5617
locke-lunargd556cc32019-09-17 01:21:23 -06005618void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
5619 VkSurfaceKHR surface,
5620 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
5621 VkResult result) {
5622 if (VK_SUCCESS != result) return;
5623 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005624 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005625
5626 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5627 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005628}
5629
5630void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
5631 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5632 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
5633 if (VK_SUCCESS != result) return;
5634 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005635 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005636
5637 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5638 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005639}
5640
5641void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
5642 VkSurfaceKHR surface,
5643 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
5644 VkResult result) {
5645 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005646 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
5647 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
5648 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
5649 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
5650 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
5651 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
5652 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
5653 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
5654 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
5655 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005656
5657 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5658 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005659}
5660
5661void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
5662 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
5663 VkBool32 *pSupported, VkResult result) {
5664 if (VK_SUCCESS != result) return;
5665 auto surface_state = GetSurfaceState(surface);
5666 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
5667}
5668
5669void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
5670 VkSurfaceKHR surface,
5671 uint32_t *pPresentModeCount,
5672 VkPresentModeKHR *pPresentModes,
5673 VkResult result) {
5674 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5675
5676 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
5677 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005678
5679 if (*pPresentModeCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005680 if (*pPresentModeCount > physical_device_state->present_modes.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06005681 physical_device_state->present_modes.resize(*pPresentModeCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005682 }
locke-lunargd556cc32019-09-17 01:21:23 -06005683 }
5684 if (pPresentModes) {
locke-lunargd556cc32019-09-17 01:21:23 -06005685 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
5686 physical_device_state->present_modes[i] = pPresentModes[i];
5687 }
5688 }
5689}
5690
5691void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
5692 uint32_t *pSurfaceFormatCount,
5693 VkSurfaceFormatKHR *pSurfaceFormats,
5694 VkResult result) {
5695 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5696
5697 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005698
5699 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005700 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06005701 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005702 }
locke-lunargd556cc32019-09-17 01:21:23 -06005703 }
5704 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06005705 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
5706 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
5707 }
5708 }
5709}
5710
5711void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
5712 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5713 uint32_t *pSurfaceFormatCount,
5714 VkSurfaceFormat2KHR *pSurfaceFormats,
5715 VkResult result) {
5716 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5717
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005718 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005719 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005720 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
5721 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
5722 }
locke-lunargd556cc32019-09-17 01:21:23 -06005723 }
5724 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06005725 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005726 physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
locke-lunargd556cc32019-09-17 01:21:23 -06005727 }
5728 }
5729}
5730
// Forward a debug-utils label "begin" to the report-data label tracking.
void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}
5735
// Forward a debug-utils label "end" to the report-data label tracking.
void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}
5739
5740void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
5741 const VkDebugUtilsLabelEXT *pLabelInfo) {
5742 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
5743
5744 // Squirrel away an easily accessible copy.
5745 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5746 cb_state->debug_label = LoggingLabel(pLabelInfo);
5747}
5748
5749void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005750 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06005751 if (NULL != pPhysicalDeviceGroupProperties) {
5752 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
5753 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
5754 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
5755 auto &phys_device_state = physical_device_map[cur_phys_dev];
5756 phys_device_state.phys_device = cur_phys_dev;
5757 // Init actual features for each physical device
5758 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
5759 }
5760 }
5761 }
5762}
5763
5764void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005765 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06005766 VkResult result) {
5767 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5768 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5769}
5770
5771void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005772 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06005773 VkResult result) {
5774 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5775 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5776}
5777
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005778void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
5779 uint32_t queueFamilyIndex,
5780 uint32_t *pCounterCount,
5781 VkPerformanceCounterKHR *pCounters) {
5782 if (NULL == pCounters) return;
5783
5784 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5785 assert(physical_device_state);
5786
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005787 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queue_family_counters(new QUEUE_FAMILY_PERF_COUNTERS());
5788 queue_family_counters->counters.resize(*pCounterCount);
5789 for (uint32_t i = 0; i < *pCounterCount; i++) queue_family_counters->counters[i] = pCounters[i];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005790
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005791 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queue_family_counters);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005792}
5793
5794void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
5795 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
5796 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
5797 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5798 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
5799}
5800
5801void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
5802 VkResult result) {
5803 if (result == VK_SUCCESS) performance_lock_acquired = true;
5804}
5805
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005806void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
5807 performance_lock_acquired = false;
5808 for (auto &cmd_buffer : commandBufferMap) {
5809 cmd_buffer.second->performance_lock_released = true;
5810 }
5811}
5812
locke-lunargd556cc32019-09-17 01:21:23 -06005813void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005814 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06005815 const VkAllocationCallbacks *pAllocator) {
5816 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005817 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5818 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005819 desc_template_map.erase(descriptorUpdateTemplate);
5820}
5821
5822void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005823 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06005824 const VkAllocationCallbacks *pAllocator) {
5825 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005826 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5827 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005828 desc_template_map.erase(descriptorUpdateTemplate);
5829}
5830
Mike Schuchardt2df08912020-12-15 16:28:09 -08005831void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
5832 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) {
locke-lunargd556cc32019-09-17 01:21:23 -06005833 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005834 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06005835 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
5836}
5837
Mike Schuchardt2df08912020-12-15 16:28:09 -08005838void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device,
5839 const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
5840 const VkAllocationCallbacks *pAllocator,
5841 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate,
5842 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005843 if (VK_SUCCESS != result) return;
5844 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5845}
5846
5847void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08005848 VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
5849 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06005850 if (VK_SUCCESS != result) return;
5851 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5852}
5853
5854void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005855 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06005856 const void *pData) {
5857 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
5858 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
5859 assert(0);
5860 } else {
5861 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
5862 // TODO: Record template push descriptor updates
5863 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
5864 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
5865 }
5866 }
5867}
5868
5869void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
5870 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
5871 const void *pData) {
5872 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
5873}
5874
5875void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08005876 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06005877 const void *pData) {
5878 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
5879}
5880
// Record state for vkCmdPushDescriptorSetWithTemplateKHR: decode the opaque template
// payload into ordinary write updates and feed them to the push-descriptor tracker.
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
                                                                              VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                              VkPipelineLayout layout, uint32_t set,
                                                                              const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    // Silently skip if the template was never tracked (invalid handle).
    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        // Descriptor set layout for the target `set` index within the pipeline layout.
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            // (VK_NULL_HANDLE: push descriptors have no backing descriptor set object).
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}
5902
5903void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
5904 uint32_t *pPropertyCount, void *pProperties) {
5905 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5906 if (*pPropertyCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005907 physical_device_state->display_plane_property_count = *pPropertyCount;
5908 }
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005909 if (*pPropertyCount || pProperties) {
5910 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005911 }
5912}
5913
5914void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
5915 uint32_t *pPropertyCount,
5916 VkDisplayPlanePropertiesKHR *pProperties,
5917 VkResult result) {
5918 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5919 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5920}
5921
5922void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
5923 uint32_t *pPropertyCount,
5924 VkDisplayPlaneProperties2KHR *pProperties,
5925 VkResult result) {
5926 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5927 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5928}
5929
5930void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5931 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
5932 QueryObject query_obj = {queryPool, query, index};
5933 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5934 RecordCmdBeginQuery(cb_state, query_obj);
5935}
5936
5937void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5938 uint32_t query, uint32_t index) {
5939 QueryObject query_obj = {queryPool, query, index};
5940 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5941 RecordCmdEndQuery(cb_state, query_obj);
5942}
5943
5944void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
5945 VkSamplerYcbcrConversion ycbcr_conversion) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005946 auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();
5947
locke-lunargd556cc32019-09-17 01:21:23 -06005948 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005949 RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005950 }
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005951
5952 const VkFormat conversion_format = create_info->format;
5953
5954 if (conversion_format != VK_FORMAT_UNDEFINED) {
5955 // If format is VK_FORMAT_UNDEFINED, will be set by external AHB features
5956 ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
5957 }
5958
5959 ycbcr_state->chromaFilter = create_info->chromaFilter;
5960 ycbcr_state->format = conversion_format;
5961 samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005962}
5963
5964void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
5965 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5966 const VkAllocationCallbacks *pAllocator,
5967 VkSamplerYcbcrConversion *pYcbcrConversion,
5968 VkResult result) {
5969 if (VK_SUCCESS != result) return;
5970 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5971}
5972
5973void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
5974 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5975 const VkAllocationCallbacks *pAllocator,
5976 VkSamplerYcbcrConversion *pYcbcrConversion,
5977 VkResult result) {
5978 if (VK_SUCCESS != result) return;
5979 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5980}
5981
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005982void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
5983 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
5984 RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
5985 }
5986
5987 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
5988 ycbcr_state->destroyed = true;
5989 samplerYcbcrConversionMap.erase(ycbcr_conversion);
5990}
5991
locke-lunargd556cc32019-09-17 01:21:23 -06005992void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
5993 const VkAllocationCallbacks *pAllocator) {
5994 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005995 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005996}
5997
5998void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
5999 VkSamplerYcbcrConversion ycbcrConversion,
6000 const VkAllocationCallbacks *pAllocator) {
6001 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07006002 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06006003}
6004
// Host-side query reset (VK_EXT_host_query_reset / core 1.2): mark the affected
// queries in the shadow map as QUERYSTATE_RESET.
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    // Clamp to the pool's actual size so an oversized queryCount can't index past the end.
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        // Performance queries track one state entry per counter pass.
        // NOTE(review): query_obj.perf_pass carries the last pass index into the next outer
        // iteration's first write; harmless since the inner loop rewrites every pass anyway.
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t pass_index = 0; pass_index < query_pool_state->n_performance_passes; pass_index++) {
                query_obj.perf_pass = pass_index;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}
6028
Tony-LunarG977448c2019-12-02 14:52:02 -07006029void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
6030 uint32_t queryCount) {
6031 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
6032}
6033
6034void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
6035 uint32_t queryCount) {
6036 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
6037}
6038
locke-lunargd556cc32019-09-17 01:21:23 -06006039void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
6040 const TEMPLATE_STATE *template_state, const void *pData) {
6041 // Translate the templated update into a normal update for validation...
6042 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
6043 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
6044 decoded_update.desc_writes.data(), 0, NULL);
6045}
6046
// Update the common AllocateDescriptorSetsData
// Collects the layout node for each requested set and tallies, per descriptor type,
// how many descriptors this allocation will consume from the pool.
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                // Descriptor type enum value doubles as the accumulation index.
                uint32_t type_index = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[type_index] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
    }
}
6064
6065// Decrement allocated sets from the pool and insert new sets into set_map
6066void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
6067 const VkDescriptorSet *descriptor_sets,
6068 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
6069 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
6070 // Account for sets and individual descriptors allocated from pool
6071 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
6072 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
6073 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
6074 }
6075
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07006076 const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(p_alloc_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06006077 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
6078
6079 // Create tracking object for each descriptor set; insert into global map and the pool's set.
6080 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
6081 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
6082
Jeff Bolz41a1ced2019-10-11 11:40:49 -05006083 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07006084 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06006085 pool_state->sets.insert(new_ds.get());
6086 new_ds->in_use.store(0);
6087 setMap[descriptor_sets[i]] = std::move(new_ds);
6088 }
6089}
6090
6091// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
Jeremy Kniager05631e72020-06-08 14:21:35 -06006092void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
locke-lunarg540b2252020-08-03 13:23:36 -06006093 VkPipelineBindPoint bind_point, const char *function) {
6094 UpdateDrawState(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06006095 cb_state->hasDispatchCmd = true;
6096}
6097
locke-lunargd556cc32019-09-17 01:21:23 -06006098// Generic function to handle state update for all CmdDraw* type functions
locke-lunarg540b2252020-08-03 13:23:36 -06006099void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
6100 const char *function) {
6101 UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06006102 cb_state->hasDrawCmd = true;
David Zhao Akeley44139b12021-04-26 16:16:13 -07006103
6104 // Update the consumed viewport/scissor count.
6105 uint32_t& used = cb_state->usedViewportScissorCount;
6106 used = std::max(used, cb_state->pipelineStaticViewportCount);
6107 used = std::max(used, cb_state->pipelineStaticScissorCount);
6108 cb_state->usedDynamicViewportCount |= !!(cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET); // !! silences MSVC warn
6109 cb_state->usedDynamicScissorCount |= !!(cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET);
locke-lunargd556cc32019-09-17 01:21:23 -06006110}
6111
6112void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
6113 uint32_t firstVertex, uint32_t firstInstance) {
6114 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006115 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
locke-lunargd556cc32019-09-17 01:21:23 -06006116}
6117
6118void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
6119 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
6120 uint32_t firstInstance) {
6121 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006122 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
locke-lunargd556cc32019-09-17 01:21:23 -06006123}
6124
6125void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
6126 uint32_t count, uint32_t stride) {
6127 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6128 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006129 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06006130 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6131}
6132
6133void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
6134 VkDeviceSize offset, uint32_t count, uint32_t stride) {
6135 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6136 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006137 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06006138 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6139}
6140
6141void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
6142 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006143 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
locke-lunargd556cc32019-09-17 01:21:23 -06006144}
6145
6146void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
6147 VkDeviceSize offset) {
6148 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006149 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06006150 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6151 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6152}
6153
Tony-LunarG977448c2019-12-02 14:52:02 -07006154void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
6155 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
locke-lunarg540b2252020-08-03 13:23:36 -06006156 uint32_t stride, const char *function) {
Tony-LunarG977448c2019-12-02 14:52:02 -07006157 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6158 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6159 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006160 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Tony-LunarG977448c2019-12-02 14:52:02 -07006161 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6162 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
6163}
6164
locke-lunargd556cc32019-09-17 01:21:23 -06006165void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
6166 VkDeviceSize offset, VkBuffer countBuffer,
6167 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
6168 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06006169 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
6170 "vkCmdDrawIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07006171}
6172
6173void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
6174 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
6175 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06006176 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
6177 "vkCmdDrawIndirectCount()");
Tony-LunarG977448c2019-12-02 14:52:02 -07006178}
6179
6180void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
6181 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
locke-lunarg540b2252020-08-03 13:23:36 -06006182 uint32_t maxDrawCount, uint32_t stride, const char *function) {
locke-lunargd556cc32019-09-17 01:21:23 -06006183 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6184 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6185 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006186 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
locke-lunargd556cc32019-09-17 01:21:23 -06006187 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6188 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
6189}
6190
6191void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
6192 VkDeviceSize offset, VkBuffer countBuffer,
6193 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
6194 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06006195 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
6196 "vkCmdDrawIndexedIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07006197}
6198
6199void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
6200 VkDeviceSize offset, VkBuffer countBuffer,
6201 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
6202 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06006203 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
6204 "vkCmdDrawIndexedIndirectCount()");
locke-lunargd556cc32019-09-17 01:21:23 -06006205}
6206
6207void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
6208 uint32_t firstTask) {
6209 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006210 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06006211}
6212
6213void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
6214 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
6215 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006216 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
6217 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06006218 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6219 if (buffer_state) {
6220 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6221 }
6222}
6223
6224void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
6225 VkDeviceSize offset, VkBuffer countBuffer,
6226 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
6227 uint32_t stride) {
6228 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6229 BUFFER_STATE *buffer_state = GetBufferState(buffer);
6230 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06006231 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
6232 "vkCmdDrawMeshTasksIndirectCountNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06006233 if (buffer_state) {
6234 AddCommandBufferBindingBuffer(cb_state, buffer_state);
6235 }
6236 if (count_buffer_state) {
6237 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
6238 }
6239}
6240
// Record state for a newly created shader module: parse and cache the SPIR-V so later
// pipeline validation can inspect it.
// csm_state_data is scratch state the chassis threads through the create-shader-module
// intercepts (carries the instrumented code / unique shader id).
void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    // Target environment depends on the device's API version and VK_KHR_spirv_1_4.
    spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
    // First word of a SPIR-V binary is its magic number; non-SPIR-V code gets an empty
    // placeholder state (presumably e.g. VK_NV_glsl_shader — cannot confirm from here).
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    new_shader_module->SetPushConstantUsedInShader();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}
6256
6257void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05006258 PIPELINE_STATE::StageState *stage_state) const {
locke-lunargd556cc32019-09-17 01:21:23 -06006259 // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
locke-lunargde3f0fa2020-09-10 11:55:31 -06006260 stage_state->entry_point_name = pStage->pName;
6261 stage_state->shader_state = GetShared<SHADER_MODULE_STATE>(pStage->module);
6262 auto module = stage_state->shader_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06006263 if (!module->has_valid_spirv) return;
6264
6265 // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
sfricke-samsung962cad92021-04-13 00:46:29 -07006266 auto entrypoint = module->FindEntrypoint(pStage->pName, pStage->stage);
locke-lunargd556cc32019-09-17 01:21:23 -06006267 if (entrypoint == module->end()) return;
6268
locke-lunarg654e3692020-06-04 17:19:15 -06006269 stage_state->stage_flag = pStage->stage;
6270
locke-lunargd556cc32019-09-17 01:21:23 -06006271 // Mark accessible ids
sfricke-samsung962cad92021-04-13 00:46:29 -07006272 stage_state->accessible_ids = module->MarkAccessibleIds(entrypoint);
6273 module->ProcessExecutionModes(entrypoint, pipeline);
locke-lunargd556cc32019-09-17 01:21:23 -06006274
sfricke-samsung962cad92021-04-13 00:46:29 -07006275 stage_state->descriptor_uses = module->CollectInterfaceByDescriptorSlot(
6276 stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
locke-lunargd556cc32019-09-17 01:21:23 -06006277 // Capture descriptor uses for the pipeline
locke-lunarg36045992020-08-20 16:54:37 -06006278 for (const auto &use : stage_state->descriptor_uses) {
locke-lunargd556cc32019-09-17 01:21:23 -06006279 // While validating shaders capture which slots are used by the pipeline
John Zulauf649edd52019-10-02 14:39:41 -06006280 const uint32_t slot = use.first.first;
locke-lunarg351c9d82020-10-23 14:43:21 -06006281 pipeline->active_slots[slot][use.first.second].is_writable |= use.second.is_writable;
locke-lunarg36045992020-08-20 16:54:37 -06006282 auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
sfricke-samsung962cad92021-04-13 00:46:29 -07006283 reqs = descriptor_req(reqs | module->DescriptorTypeToReqs(use.second.type_id));
locke-lunarg25b6c352020-08-06 17:44:18 -06006284 if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
locke-lunarg12d20992020-09-21 12:46:49 -06006285 if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
locke-lunargae2a43c2020-09-22 17:21:57 -06006286 if (use.second.is_sampler_bias_offset) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET);
locke-lunarg12d20992020-09-21 12:46:49 -06006287
John Zulauf649edd52019-10-02 14:39:41 -06006288 pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
locke-lunarg36045992020-08-20 16:54:37 -06006289 if (use.second.samplers_used_by_image.size()) {
locke-lunarg654a9052020-10-13 16:28:42 -06006290 auto &samplers_used_by_image = pipeline->active_slots[slot][use.first.second].samplers_used_by_image;
6291 if (use.second.samplers_used_by_image.size() > samplers_used_by_image.size()) {
6292 samplers_used_by_image.resize(use.second.samplers_used_by_image.size());
6293 }
locke-lunarg654a9052020-10-13 16:28:42 -06006294 uint32_t image_index = 0;
6295 for (const auto &samplers : use.second.samplers_used_by_image) {
6296 for (const auto &sampler : samplers) {
locke-lunargb8be8222020-10-20 00:34:37 -06006297 samplers_used_by_image[image_index].emplace(sampler, nullptr);
locke-lunarg654a9052020-10-13 16:28:42 -06006298 }
6299 ++image_index;
6300 }
locke-lunarg36045992020-08-20 16:54:37 -06006301 }
locke-lunargd556cc32019-09-17 01:21:23 -06006302 }
locke-lunarg78486832020-09-09 19:39:42 -06006303
locke-lunarg96dc9632020-06-10 17:22:18 -06006304 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
sfricke-samsung962cad92021-04-13 00:46:29 -07006305 pipeline->fragmentShader_writable_output_location_list = module->CollectWritableOutputLocationinFS(*pStage);
locke-lunarg96dc9632020-06-10 17:22:18 -06006306 }
locke-lunargd556cc32019-09-17 01:21:23 -06006307}
6308
// Discussed in detail in https://github.com/KhronosGroup/Vulkan-Docs/issues/1081
// Internal discussion and CTS were written to prove that this is not called after an incompatible vkCmdBindPipeline
// "Binding a pipeline with a layout that is not compatible with the push constant layout does not disturb the push constant values"
//
// vkCmdBindDescriptorSet has nothing to do with push constants, so there is no need to call this after it either
//
// Part of this assumes apps will have everything properly compatible at draw/dispatch/traceRays/etc. time, or else other VUs will be triggered
locke-lunargd556cc32019-09-17 01:21:23 -06006316void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
6317 if (cb_state == nullptr) {
6318 return;
6319 }
6320
6321 const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
6322 if (pipeline_layout_state == nullptr) {
6323 return;
6324 }
6325
6326 if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
6327 cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
6328 cb_state->push_constant_data.clear();
locke-lunargde3f0fa2020-09-10 11:55:31 -06006329 cb_state->push_constant_data_update.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06006330 uint32_t size_needed = 0;
John Zulauf79f06582021-02-27 18:38:39 -07006331 for (const auto &push_constant_range : *cb_state->push_constant_data_ranges) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06006332 auto size = push_constant_range.offset + push_constant_range.size;
6333 size_needed = std::max(size_needed, size);
6334
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07006335 auto stage_flags = push_constant_range.stageFlags;
locke-lunargde3f0fa2020-09-10 11:55:31 -06006336 uint32_t bit_shift = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07006337 while (stage_flags) {
6338 if (stage_flags & 1) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06006339 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
6340 const auto it = cb_state->push_constant_data_update.find(flag);
6341
6342 if (it != cb_state->push_constant_data_update.end()) {
6343 if (it->second.size() < push_constant_range.offset) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06006344 it->second.resize(push_constant_range.offset, PC_Byte_Not_Set);
locke-lunargde3f0fa2020-09-10 11:55:31 -06006345 }
6346 if (it->second.size() < size) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06006347 it->second.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06006348 }
6349 } else {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06006350 std::vector<uint8_t> bytes;
6351 bytes.resize(push_constant_range.offset, PC_Byte_Not_Set);
6352 bytes.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06006353 cb_state->push_constant_data_update[flag] = bytes;
6354 }
6355 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07006356 stage_flags = stage_flags >> 1;
locke-lunargde3f0fa2020-09-10 11:55:31 -06006357 ++bit_shift;
6358 }
locke-lunargd556cc32019-09-17 01:21:23 -06006359 }
6360 cb_state->push_constant_data.resize(size_needed, 0);
6361 }
6362}
John Zulauf22b0fbe2019-10-15 06:26:16 -06006363
// Create/refresh tracker state for each swapchain image handle the app just retrieved.
// Swapchain images are not created through vkCreateImage, so an equivalent VkImageCreateInfo
// is reconstructed here from the swapchain's own create info before building the IMAGE_STATE.
void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    // Grow (never shrink) the per-swapchain image list to the count just reported
    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            SWAPCHAIN_IMAGE &swapchain_image = swapchain_state->images[i];
            if (swapchain_image.image_state) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            auto image_ci = LvlInitStruct<VkImageCreateInfo>();
            image_ci.pNext = LvlFindInChain<VkImageFormatListCreateInfo>(swapchain_state->createInfo.pNext);
            image_ci.flags = 0;  // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            // Translate the swapchain create flags into their equivalent image create flags
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR) {
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            }
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR) {
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            }
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT);
            }

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
            auto *image_state = imageMap[pSwapchainImages[i]].get();
            assert(image_state);
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            image_state->unprotected = ((image_ci.flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);

            // Since swapchains can't be linear, we can create an encoder here, and SyncVal needs a fake_base_address
            image_state->fragment_encoder = std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(
                new subresource_adapter::ImageRangeEncoder(*image_state));

            if (swapchain_image.bound_images.empty()) {
                // First time "bind" allocates
                image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
            } else {
                // All others reuse
                image_state->swapchain_fake_address = (*swapchain_image.bound_images.cbegin())->swapchain_fake_address;
                // Since there are others, need to update the aliasing information
                AddAliasingImage(image_state, &swapchain_image.bound_images);
            }

            swapchain_image.image_state = image_state;  // Don't move, it's already a reference to the imageMap
            swapchain_image.bound_images.emplace(image_state);

            AddImageStateProps(*image_state, device, physical_device);
        }
    }

    // Record how many images the app has retrieved so far (VK_INCOMPLETE may report fewer than exist)
    if (*pSwapchainImageCount) {
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}
sourav parmar35e7a002020-06-09 17:58:44 -07006441
sourav parmar35e7a002020-06-09 17:58:44 -07006442void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
6443 const VkCopyAccelerationStructureInfoKHR *pInfo) {
6444 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6445 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07006446 ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfo->src);
6447 ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfo->dst);
sourav parmar35e7a002020-06-09 17:58:44 -07006448 if (dst_as_state != nullptr && src_as_state != nullptr) {
6449 dst_as_state->built = true;
6450 dst_as_state->build_info_khr = src_as_state->build_info_khr;
6451 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
6452 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
6453 }
6454 }
6455}
Piers Daniell39842ee2020-07-10 16:42:33 -06006456
6457void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
6458 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6459 cb_state->status |= CBSTATUS_CULL_MODE_SET;
6460 cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
6461}
6462
6463void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
6464 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6465 cb_state->status |= CBSTATUS_FRONT_FACE_SET;
6466 cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
6467}
6468
6469void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
6470 VkPrimitiveTopology primitiveTopology) {
6471 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6472 cb_state->primitiveTopology = primitiveTopology;
6473 cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
6474 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
6475}
6476
6477void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
6478 const VkViewport *pViewports) {
6479 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07006480 uint32_t bits = (1u << viewportCount) - 1u;
6481 cb_state->viewportWithCountMask |= bits;
6482 cb_state->trashedViewportMask &= ~bits;
Tobias Hector6663c9b2020-11-05 10:18:02 +00006483 cb_state->viewportWithCountCount = viewportCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07006484 cb_state->trashedViewportCount = false;
Piers Daniell39842ee2020-07-10 16:42:33 -06006485 cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
6486 cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07006487
6488 cb_state->dynamicViewports.resize(std::max(size_t(viewportCount), cb_state->dynamicViewports.size()));
6489 for (size_t i = 0; i < viewportCount; ++i) {
6490 cb_state->dynamicViewports[i] = pViewports[i];
6491 }
Piers Daniell39842ee2020-07-10 16:42:33 -06006492}
6493
6494void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
6495 const VkRect2D *pScissors) {
6496 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07006497 uint32_t bits = (1u << scissorCount) - 1u;
6498 cb_state->scissorWithCountMask |= bits;
6499 cb_state->trashedScissorMask &= ~bits;
6500 cb_state->scissorWithCountCount = scissorCount;
6501 cb_state->trashedScissorCount = false;
Piers Daniell39842ee2020-07-10 16:42:33 -06006502 cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
6503 cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
6504}
6505
6506void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
6507 uint32_t bindingCount, const VkBuffer *pBuffers,
6508 const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
6509 const VkDeviceSize *pStrides) {
6510 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6511 if (pStrides) {
6512 cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
6513 cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
6514 }
6515
6516 uint32_t end = firstBinding + bindingCount;
6517 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
6518 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
6519 }
6520
6521 for (uint32_t i = 0; i < bindingCount; ++i) {
6522 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07006523 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
Piers Daniell39842ee2020-07-10 16:42:33 -06006524 vertex_buffer_binding.offset = pOffsets[i];
6525 vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
6526 vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
6527 // Add binding for this vertex buffer to this commandbuffer
6528 if (pBuffers[i]) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07006529 AddCommandBufferBindingBuffer(cb_state, vertex_buffer_binding.buffer_state.get());
Piers Daniell39842ee2020-07-10 16:42:33 -06006530 }
6531 }
6532}
6533
6534void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
6535 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6536 cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
6537 cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
6538}
6539
6540void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
6541 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6542 cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
6543 cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
6544}
6545
6546void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
6547 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6548 cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
6549 cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
6550}
6551
6552void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
6553 VkBool32 depthBoundsTestEnable) {
6554 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6555 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
6556 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
6557}
6558void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
6559 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6560 cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
6561 cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
6562}
6563
6564void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
6565 VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
6566 VkCompareOp compareOp) {
6567 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6568 cb_state->status |= CBSTATUS_STENCIL_OP_SET;
6569 cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
6570}
locke-lunarg4189aa22020-10-21 00:23:48 -06006571
6572void ValidationStateTracker::PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
6573 uint32_t discardRectangleCount,
6574 const VkRect2D *pDiscardRectangles) {
6575 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6576 cb_state->status |= CBSTATUS_DISCARD_RECTANGLE_SET;
6577 cb_state->static_status &= ~CBSTATUS_DISCARD_RECTANGLE_SET;
6578}
6579
6580void ValidationStateTracker::PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
6581 const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
6582 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6583 cb_state->status |= CBSTATUS_SAMPLE_LOCATIONS_SET;
6584 cb_state->static_status &= ~CBSTATUS_SAMPLE_LOCATIONS_SET;
6585}
6586
6587void ValidationStateTracker::PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer,
6588 VkCoarseSampleOrderTypeNV sampleOrderType,
6589 uint32_t customSampleOrderCount,
6590 const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) {
6591 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6592 cb_state->status |= CBSTATUS_COARSE_SAMPLE_ORDER_SET;
6593 cb_state->static_status &= ~CBSTATUS_COARSE_SAMPLE_ORDER_SET;
6594}
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -07006595
6596void ValidationStateTracker::PreCallRecordCmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer, uint32_t patchControlPoints) {
6597 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6598 cb_state->status |= CBSTATUS_PATCH_CONTROL_POINTS_SET;
6599 cb_state->static_status &= ~CBSTATUS_PATCH_CONTROL_POINTS_SET;
6600}
6601
6602void ValidationStateTracker::PreCallRecordCmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp) {
6603 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6604 cb_state->status |= CBSTATUS_LOGIC_OP_SET;
6605 cb_state->static_status &= ~CBSTATUS_LOGIC_OP_SET;
6606}
6607
6608void ValidationStateTracker::PreCallRecordCmdSetRasterizerDiscardEnableEXT(VkCommandBuffer commandBuffer,
6609 VkBool32 rasterizerDiscardEnable) {
6610 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6611 cb_state->status |= CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
6612 cb_state->static_status &= ~CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
6613}
6614
6615void ValidationStateTracker::PreCallRecordCmdSetDepthBiasEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) {
6616 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6617 cb_state->status |= CBSTATUS_DEPTH_BIAS_ENABLE_SET;
6618 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_ENABLE_SET;
6619}
6620
6621void ValidationStateTracker::PreCallRecordCmdSetPrimitiveRestartEnableEXT(VkCommandBuffer commandBuffer,
6622 VkBool32 primitiveRestartEnable) {
6623 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6624 cb_state->status |= CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
6625 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07006626}