/* Copyright (c) 2015-2021 The Khronos Group Inc.
 * Copyright (c) 2015-2021 Valve Corporation
 * Copyright (c) 2015-2021 LunarG, Inc.
 * Copyright (C) 2015-2021 Google Inc.
 * Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 * Author: Tobias Hector <tobias.hector@amd.com>
 */

#include <algorithm>
#include <cmath>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"
#include "sync_utils.h"
#include "cmd_buffer_state.h"
#include "render_pass_state.h"

void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}

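// For imageless framebuffers (VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) the attachment views are supplied at render pass
// begin time instead of at framebuffer creation. A minimal, illustrative sketch of the application-side chain the
// helper below looks for (hypothetical names and values, not part of this layer):
//
//     VkRenderPassAttachmentBeginInfo attachment_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO};
//     attachment_begin.attachmentCount = view_count;
//     attachment_begin.pAttachments = view_handles;
//     VkRenderPassBeginInfo rp_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, &attachment_begin};
//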
// NOTE: Beware the lifespan of rp_begin while holding the returned pointer. If rp_begin is not a "safe" copy,
// "IMAGELESS" attachments will not persist past the API entry point exit.
static std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
                                                                          const FRAMEBUFFER_STATE &fb_state) {
    const VkImageView *attachments = fb_state.createInfo.pAttachments;
    uint32_t count = fb_state.createInfo.attachmentCount;
    if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
        const auto *framebuffer_attachments = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
        if (framebuffer_attachments) {
            attachments = framebuffer_attachments->pAttachments;
            count = framebuffer_attachments->attachmentCount;
        }
    }
    return std::make_pair(count, attachments);
}

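// Resolve every attachment handle reported by GetFramebufferAttachments into a view state pointer using the supplied
// getter; attachments that are VK_NULL_HANDLE are left as null entries in the returned vector.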
template <typename ImageViewPointer, typename Get>
std::vector<ImageViewPointer> GetAttachmentViewsImpl(const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state,
                                                     const Get &get_fn) {
    std::vector<ImageViewPointer> views;

    const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
    const auto attachment_count = count_attachment.first;
    const auto *attachments = count_attachment.second;
    views.resize(attachment_count, nullptr);
    for (uint32_t i = 0; i < attachment_count; i++) {
        if (attachments[i] != VK_NULL_HANDLE) {
            views[i] = get_fn(attachments[i]);
        }
    }
    return views;
}

std::vector<std::shared_ptr<const IMAGE_VIEW_STATE>> ValidationStateTracker::GetSharedAttachmentViews(
    const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state) const {
    auto get_fn = [this](VkImageView handle) { return this->GetShared<IMAGE_VIEW_STATE>(handle); };
    return GetAttachmentViewsImpl<std::shared_ptr<const IMAGE_VIEW_STATE>>(rp_begin, fb_state, get_fn);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = LvlFindInChain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->external_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            is_node->format_features = it->second;
        }
    }
}

void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
    const VkExternalMemoryBufferCreateInfo *embci = LvlFindInChain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
    if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        bs_node->external_ahb = true;
    }
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
    const VkExternalFormatANDROID *ext_format_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            ycbcr_state->format_features = it->second;
        }
    }
};

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
};

void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto ahb_format_props = LvlFindInChain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
    if (ahb_format_props) {
        ahb_ext_formats_map.emplace(ahb_format_props->externalFormat, ahb_format_props->formatFeatures);
    }
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state){};

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};

#endif  // VK_USE_PLATFORM_ANDROID_KHR

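// Cache the format features an image supports so later validation does not have to requery the physical device.
// Images created with VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT match their modifier against the modifier list reported
// for the format; all other tilings use the plain linear/optimal VkFormatProperties. Images with an Android external
// format keep the features recorded in RecordCreateImageANDROID.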
void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
    // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
    // If the format is an AHB external format, the features have already been set
    if (image_state.has_ahb_format == false) {
        const VkImageTiling image_tiling = image_state.createInfo.tiling;
        const VkFormat image_format = image_state.createInfo.format;
        if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
            VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
                VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
            DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image(), &drm_format_properties);

            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            // First call gets the number of compatible modifiers, second call (below) fills the allocated array
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
            drm_properties.resize(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);

            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
                    drm_format_properties.drmFormatModifier) {
                    image_state.format_features =
                        drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                    break;
                }
            }
        } else {
            VkFormatProperties format_properties;
            DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
            image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                   : format_properties.optimalTilingFeatures;
        }
    }
}

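// Create tracking state for a new VkImage. Memory requirements are captured eagerly (per plane for disjoint
// multi-planar images) so bind-time validation works even if the app never queries them itself; AHB-backed images are
// skipped because their requirements are only defined once memory is bound.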
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = LvlFindInChain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least one plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    AddImageStateProps(*is_node, device, physical_device);

    is_node->unprotected = ((pCreateInfo->flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);
    imageMap.emplace(*pImage, std::move(is_node));
}

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    // Clean up memory mapping, bindings and range references for image
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state);
        }
    }
    image_state->Destroy();
    imageMap.erase(image);
}

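// The PreCallRecordCmd* hooks that follow only record parent/child links between the command buffer and the
// resources it references (cb_node->AddChild), so that destroying a resource invalidates any command buffer that
// recorded it. They are skipped entirely when command_buffer_state tracking is disabled.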
void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        cb_node->AddChild(image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        cb_node->AddChild(image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
                                                           const VkCopyImageInfo2KHR *pCopyImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pCopyImageInfo->srcImage);
    auto dst_image_state = GetImageState(pCopyImageInfo->dstImage);

    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
                                                              const VkResolveImageInfo2KHR *pResolveImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pResolveImageInfo->srcImage);
    auto dst_image_state = GetImageState(pResolveImageInfo->dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
                                                           const VkBlitImageInfo2KHR *pBlitImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pBlitImageInfo->srcImage);
    auto dst_image_state = GetImageState(pBlitImageInfo->dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
    }
    // Get the memory requirements up front, in case the app never queries them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    buffer_state->unprotected = ((pCreateInfo->flags & VK_BUFFER_CREATE_PROTECTED_BIT) == 0);

    bufferMap.emplace(*pBuffer, std::move(buffer_state));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    auto buffer_view_state = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);

    VkFormatProperties format_properties;
    DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);
    buffer_view_state->format_features = format_properties.bufferFeatures;

    bufferViewMap.emplace(*pView, std::move(buffer_view_state));
}

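// Image views derive their format features from the parent image: AHB-format images pass their features straight
// through, DRM-format-modifier images match the parent's modifier, and all other tilings use the view format's
// linear/optimal features. inherited_usage and filter_cubic_props are cached here because computing them during
// CmdDraw validation would be too expensive.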
void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);

    // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
    const VkImageTiling image_tiling = image_state->createInfo.tiling;
    const VkFormat image_view_format = pCreateInfo->format;
    if (image_state->has_ahb_format == true) {
        // The ImageView uses the same format features as its Image since they share the same AHB
        image_view_state->format_features = image_state->format_features;
    } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
        assert(device_extensions.vk_ext_image_drm_format_modifier);
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                       nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image(), &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;

        // First call is to get the number of modifiers compatible with the queried format
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
        drm_properties.resize(drm_properties_list.drmFormatModifierCount);
        drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();

        // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
        // compatible with the queried format
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
                image_view_state->format_features |=
                    drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                break;
            }
        }
    } else {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
        image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                     : format_properties.optimalTilingFeatures;
    }

    auto usage_create_info = LvlFindInChain<VkImageViewUsageCreateInfo>(pCreateInfo->pNext);
    image_view_state->inherited_usage = (usage_create_info) ? usage_create_info->usage : image_state->createInfo.usage;

    // filter_cubic_props is used in CmdDraw validation, but querying it at draw time would cost too much, so cache it here
    image_view_state->filter_cubic_props = LvlInitStruct<VkFilterCubicImageViewImageFormatPropertiesEXT>();
    if (IsExtEnabled(device_extensions.vk_ext_filter_cubic)) {
        auto imageview_format_info = LvlInitStruct<VkPhysicalDeviceImageViewImageFormatInfoEXT>();
        imageview_format_info.imageViewType = pCreateInfo->viewType;
        auto image_format_info = LvlInitStruct<VkPhysicalDeviceImageFormatInfo2>(&imageview_format_info);
        image_format_info.type = image_state->createInfo.imageType;
        image_format_info.format = image_state->createInfo.format;
        image_format_info.tiling = image_state->createInfo.tiling;
        image_format_info.usage = image_view_state->inherited_usage;
        image_format_info.flags = image_state->createInfo.flags;

        auto image_format_properties = LvlInitStruct<VkImageFormatProperties2>(&image_view_state->filter_cubic_props);

        DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &image_format_info, &image_format_properties);
    }
    imageViewMap.emplace(*pView, std::move(image_view_state));
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
                                                            const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(pCopyBufferInfos->srcBuffer);
    auto dst_buffer_state = GetBufferState(pCopyBufferInfos->dstBuffer);

    // Update bindings between buffers and cmd buffer
    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;

    // Any bound cmd buffers are now invalid
    image_view_state->Destroy();
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);

    buffer_state->Destroy();
    bufferMap.erase(buffer_state->buffer());
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);

    // Any bound cmd buffers are now invalid
    buffer_view_state->Destroy();
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    cb_node->AddChild(buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
                                                                   const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pCopyImageToBufferInfo->srcImage);
    auto dst_buffer_state = GetBufferState(pCopyImageToBufferInfo->dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
                                                                   const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(pCopyBufferToImageInfo->srcBuffer);
    auto dst_image_state = GetImageState(pCopyBufferToImageInfo->dstImage);

    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_image_state);
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

void ValidationStateTracker::RemoveAliasingImages(const layer_data::unordered_set<IMAGE_STATE *> &bound_images) {
    // This is a one-way clear. Because bound_images contains the cross references, a single pass over it clears the
    // whole set of aliasing references; a second, reverse pass is not needed.
    for (auto *bound_image : bound_images) {
        if (bound_image) {
            bound_image->aliasing_images.clear();
        }
    }
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureStateNV(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

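// Build the DEVICE_MEMORY_STATE entry for a new allocation and record what later validation needs from the pNext
// chain: dedicated-allocation target, export/import handle types (including AHB imports on Android), multi-instance
// heaps/device masks, and protected-memory status. The fake_address is an internal placeholder (not a device
// address) that appears to exist only so each allocation gets a unique range for the tracker's bookkeeping.
// A hypothetical application-side dedicated allocation that the branch below recognizes might look like:
//
//     VkMemoryDedicatedAllocateInfo dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
//     dedicated_info.image = image;  // or .buffer, never both
//     VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, &dedicated_info};
//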
void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo, fake_address);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = LvlFindInChain<VkMemoryDedicatedAllocateInfo>(pAllocateInfo->pNext);
    if (dedicated) {
        if (dedicated->buffer) {
            const auto *buffer_state = GetBufferState(dedicated->buffer);
            if (buffer_state) {
                mem_info->dedicated_handle = VulkanTypedHandle(dedicated->buffer, kVulkanObjectTypeBuffer);
                mem_info->dedicated_create_info.buffer = buffer_state->createInfo;
            }
        } else if (dedicated->image) {
            const auto *image_state = GetImageState(dedicated->image);
            if (image_state) {
                mem_info->dedicated_handle = VulkanTypedHandle(dedicated->image, kVulkanObjectTypeImage);
                mem_info->dedicated_create_info.image = image_state->createInfo;
            }
        }
    }
    auto export_info = LvlFindInChain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }

    auto alloc_flags = LvlFindInChain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
    if (alloc_flags) {
        auto dev_mask = alloc_flags->deviceMask;
        // A device mask with more than one bit set means the allocation is replicated across physical devices
        if ((dev_mask != 0) && (dev_mask & (dev_mask - 1))) {
            mem_info->multi_instance = true;
        }
    }
    auto heap_index = phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].heapIndex;
    mem_info->multi_instance |= (((phys_dev_mem_props.memoryHeaps[heap_index].flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT) != 0) &&
                                 physical_device_count > 1);

    // Assumes validation has already ensured there is only a single import operation in the pNext chain
#ifdef VK_USE_PLATFORM_WIN32_KHR
    auto win32_import = LvlFindInChain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
    if (win32_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = win32_import->handleType;
    }
#endif
    auto fd_import = LvlFindInChain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
    if (fd_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = fd_import->handleType;
    }
    auto host_pointer_import = LvlFindInChain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
    if (host_pointer_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = host_pointer_import->handleType;
    }
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    // AHB imports don't carry a handle type in the pNext struct
    // All imported AHBs are assumed to use the same, single handleType
    auto ahb_import = LvlFindInChain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
    if ((ahb_import) && (ahb_import->buffer != nullptr)) {
        mem_info->is_import_ahb = true;
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
#endif  // VK_USE_PLATFORM_ANDROID_KHR

    const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    mem_info->unprotected = ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) == 0);
}

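// Record the descriptor bindings a draw/dispatch will actually use into the command buffer state. For "bindless"
// style sets with very large descriptor counts this is a hot path, so sets whose contents, image layouts, and
// binding requirements have not changed since the last validation pass are skipped (see the need_update logic below).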
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
                                             const char *function) {
    const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
    auto &state = cb_state->lastBound[lv_bind_point];
    PIPELINE_STATE *pipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pipe->active_slots) {
            uint32_t set_index = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[set_index].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[set_index].validated_set != descriptor_set ||
                state.per_set[set_index].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[set_index].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[set_index].validated_set_binding_req_map.begin(),
                                              state.per_set[set_index].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[set_index].validated_set_binding_req_map.begin(),
                                        state.per_set[set_index].validated_set_binding_req_map.end(),
                                        layer_data::insert_iterator<BindingReqMap>(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, delta_reqs, function);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, binding_req_map, function);
                }

                state.per_set[set_index].validated_set = descriptor_set;
                state.per_set[set_index].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[set_index].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[set_index].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[set_index].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[set_index].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    if (!pipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    // Any bound cmd buffers are now invalid
    descriptor_set->Destroy();

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto *ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureStateNV(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureKHR: {
            base_ptr = GetAccelerationStructureStateKHR(object_struct.Cast<VkAccelerationStructureKHR>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Gets the union of all features defined by Potential Format Features
// Note: this does not handle the external-format AHB case, since such formats can only be used for sampled images
VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
    VkFormatFeatureFlags format_features = 0;

    if (format != VK_FORMAT_UNDEFINED) {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
        format_features |= format_properties.linearTilingFeatures;
        format_features |= format_properties.optimalTilingFeatures;
        if (device_extensions.vk_ext_image_drm_format_modifier) {
            // VK_KHR_get_physical_device_properties2 is required in this case
            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;

            // First call is to get the number of modifiers compatible with the queried format
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);

            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
            drm_properties.resize(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();

            // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
            // compatible with the queried format
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);

            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
            }
        }
    }

    return format_features;
}

locke-lunargd556cc32019-09-17 01:21:23 -0600951// Reset the command buffer state
952// Maintain the createInfo and set state to CB_NEW, but clear all other state
953void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700954 CMD_BUFFER_STATE *cb_state = GetCBState(cb);
955 if (cb_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600956 cb_state->Reset();
locke-lunargd556cc32019-09-17 01:21:23 -0600957 // Clean up the label data
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -0600958 ResetCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
locke-lunargd556cc32019-09-17 01:21:23 -0600959 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600960
locke-lunargd556cc32019-09-17 01:21:23 -0600961 if (command_buffer_reset_callback) {
962 (*command_buffer_reset_callback)(cb);
963 }
964}
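
// Illustrative note (assumption about call sites, not stated in this file): one application-side path that
// leads to this reset is an explicit command buffer reset, e.g.
//
//     vkResetCommandBuffer(cmd, VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);   // cmd is assumed to exist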
965
966void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
967 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
968 VkResult result) {
969 if (VK_SUCCESS != result) return;
970
Locke Linf3873542021-04-26 11:25:10 -0600971 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
972 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
973 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
974
locke-lunargd556cc32019-09-17 01:21:23 -0600975 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
976 if (nullptr == enabled_features_found) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700977 const auto *features2 = LvlFindInChain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600978 if (features2) {
979 enabled_features_found = &(features2->features);
Locke Linf3873542021-04-26 11:25:10 -0600980
981 const auto *provoking_vertex_features = lvl_find_in_chain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(features2->pNext);
982 if (provoking_vertex_features) {
983 state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
984 }
locke-lunargd556cc32019-09-17 01:21:23 -0600985 }
986 }
987
locke-lunargd556cc32019-09-17 01:21:23 -0600988 if (nullptr == enabled_features_found) {
989 state_tracker->enabled_features.core = {};
990 } else {
991 state_tracker->enabled_features.core = *enabled_features_found;
992 }
993
994 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
995 // previously set them through an explicit API call.
996 uint32_t count;
997 auto pd_state = GetPhysicalDeviceState(gpu);
998 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
999 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
1000 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
1001 // Save local link to this device's physical device state
1002 state_tracker->physical_device_state = pd_state;
1003
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001004 const auto *vulkan_12_features = LvlFindInChain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001005 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001006 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001007 } else {
sfricke-samsung27c70722020-05-02 08:42:39 -07001008 // Set Extension Feature Aliases to false as there is no struct to check
1009 state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
1010 state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
1011 state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
1012 state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
1013 state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
1014 state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001015 state_tracker->enabled_features.core12.subgroupBroadcastDynamicId = VK_FALSE;
sfricke-samsung27c70722020-05-02 08:42:39 -07001016
1017 // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001018
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001019 const auto *eight_bit_storage_features = LvlFindInChain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001020 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001021 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1022 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1023 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1024 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001025 }
1026
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001027 const auto *float16_int8_features = LvlFindInChain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001028 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001029 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1030 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001031 }
1032
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001033 const auto *descriptor_indexing_features = LvlFindInChain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001034 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001035 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1036 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1037 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1038 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1039 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1040 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1041 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1042 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1043 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1044 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1045 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1046 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1047 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1048 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1049 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1050 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1051 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1052 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1053 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1054 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1055 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1056 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1057 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1058 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1059 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1060 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1061 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1062 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
1063 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
1064 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1065 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1066 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1067 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1068 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1069 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1070 descriptor_indexing_features->descriptorBindingPartiallyBound;
1071 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1072 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1073 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001074 }
1075
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001076 const auto *scalar_block_layout_features = LvlFindInChain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001077 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001078 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001079 }
1080
1081 const auto *imageless_framebuffer_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001082 LvlFindInChain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001083 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001084 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001085 }
1086
1087 const auto *uniform_buffer_standard_layout_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001088 LvlFindInChain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001089 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001090 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1091 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001092 }
1093
1094 const auto *subgroup_extended_types_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001095 LvlFindInChain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001096 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001097 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1098 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001099 }
1100
1101 const auto *separate_depth_stencil_layouts_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001102 LvlFindInChain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001103 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001104 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1105 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001106 }
1107
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001108 const auto *host_query_reset_features = LvlFindInChain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001109 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001110 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001111 }
1112
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001113 const auto *timeline_semaphore_features = LvlFindInChain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001114 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001115 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001116 }
1117
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001118 const auto *buffer_device_address = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001119 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001120 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1121 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1122 buffer_device_address->bufferDeviceAddressCaptureReplay;
1123 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1124 buffer_device_address->bufferDeviceAddressMultiDevice;
1125 }
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001126
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001127 const auto *atomic_int64_features = LvlFindInChain<VkPhysicalDeviceShaderAtomicInt64Features>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001128 if (atomic_int64_features) {
1129 state_tracker->enabled_features.core12.shaderBufferInt64Atomics = atomic_int64_features->shaderBufferInt64Atomics;
1130 state_tracker->enabled_features.core12.shaderSharedInt64Atomics = atomic_int64_features->shaderSharedInt64Atomics;
1131 }
1132
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001133 const auto *memory_model_features = LvlFindInChain<VkPhysicalDeviceVulkanMemoryModelFeatures>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001134 if (memory_model_features) {
1135 state_tracker->enabled_features.core12.vulkanMemoryModel = memory_model_features->vulkanMemoryModel;
1136 state_tracker->enabled_features.core12.vulkanMemoryModelDeviceScope =
1137 memory_model_features->vulkanMemoryModelDeviceScope;
1138 state_tracker->enabled_features.core12.vulkanMemoryModelAvailabilityVisibilityChains =
1139 memory_model_features->vulkanMemoryModelAvailabilityVisibilityChains;
1140 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001141 }
1142
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001143 const auto *vulkan_11_features = LvlFindInChain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001144 if (vulkan_11_features) {
1145 state_tracker->enabled_features.core11 = *vulkan_11_features;
1146 } else {
1147        // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan11Features
1148
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001149 const auto *sixteen_bit_storage_features = LvlFindInChain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001150 if (sixteen_bit_storage_features) {
1151 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1152 sixteen_bit_storage_features->storageBuffer16BitAccess;
1153 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1154 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1155 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1156 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1157 }
1158
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001159 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001160 if (multiview_features) {
1161 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1162 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1163 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1164 }
1165
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001166 const auto *variable_pointers_features = LvlFindInChain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001167 if (variable_pointers_features) {
1168 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1169 variable_pointers_features->variablePointersStorageBuffer;
1170 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1171 }
1172
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001173 const auto *protected_memory_features = LvlFindInChain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001174 if (protected_memory_features) {
1175 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1176 }
1177
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001178 const auto *ycbcr_conversion_features = LvlFindInChain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001179 if (ycbcr_conversion_features) {
1180 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1181 }
1182
1183 const auto *shader_draw_parameters_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001184 LvlFindInChain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001185 if (shader_draw_parameters_features) {
1186 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001187 }
1188 }
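
    // Illustrative sketch (application-side, assumption): the two mutually exclusive ways the promoted
    // features recorded above can arrive in VkDeviceCreateInfo::pNext. Either the combined
    // VkPhysicalDeviceVulkan11Features / VkPhysicalDeviceVulkan12Features structs are chained, or the
    // individual promoted structs (e.g. VkPhysicalDevice8BitStorageFeatures) are chained, never both.
    //
    //     VkPhysicalDeviceVulkan12Features features12 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES};
    //     features12.timelineSemaphore = VK_TRUE;
    //     VkPhysicalDeviceFeatures2 features2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, &features12};
    //     VkDeviceCreateInfo ci = {VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, &features2};
    //     // ... fill queue create infos ...; pEnabledFeatures must be nullptr when features2 is chained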
1189
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001190 const auto *device_group_ci = LvlFindInChain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001191 if (device_group_ci) {
1192 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1193 state_tracker->device_group_create_info = *device_group_ci;
1194 } else {
1195 state_tracker->physical_device_count = 1;
1196 }
locke-lunargd556cc32019-09-17 01:21:23 -06001197
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001198 const auto *exclusive_scissor_features = LvlFindInChain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001199 if (exclusive_scissor_features) {
1200 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1201 }
1202
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001203 const auto *shading_rate_image_features = LvlFindInChain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001204 if (shading_rate_image_features) {
1205 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1206 }
1207
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001208 const auto *mesh_shader_features = LvlFindInChain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001209 if (mesh_shader_features) {
1210 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1211 }
1212
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001213 const auto *inline_uniform_block_features = LvlFindInChain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001214 if (inline_uniform_block_features) {
1215 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1216 }
1217
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001218 const auto *transform_feedback_features = LvlFindInChain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001219 if (transform_feedback_features) {
1220 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1221 }
1222
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001223 const auto *vtx_attrib_div_features = LvlFindInChain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001224 if (vtx_attrib_div_features) {
1225 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1226 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001227
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001228 const auto *buffer_device_address_ext = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001229 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001230 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001231 }
1232
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001233 const auto *cooperative_matrix_features = LvlFindInChain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001234 if (cooperative_matrix_features) {
1235 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1236 }
1237
locke-lunargd556cc32019-09-17 01:21:23 -06001238 const auto *compute_shader_derivatives_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001239 LvlFindInChain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001240 if (compute_shader_derivatives_features) {
1241 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1242 }
1243
1244 const auto *fragment_shader_barycentric_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001245 LvlFindInChain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001246 if (fragment_shader_barycentric_features) {
1247 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1248 }
1249
1250 const auto *shader_image_footprint_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001251 LvlFindInChain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001252 if (shader_image_footprint_features) {
1253 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1254 }
1255
1256 const auto *fragment_shader_interlock_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001257 LvlFindInChain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001258 if (fragment_shader_interlock_features) {
1259 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1260 }
1261
1262 const auto *demote_to_helper_invocation_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001263 LvlFindInChain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001264 if (demote_to_helper_invocation_features) {
1265 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1266 }
1267
1268 const auto *texel_buffer_alignment_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001269 LvlFindInChain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001270 if (texel_buffer_alignment_features) {
1271 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1272 }
1273
locke-lunargd556cc32019-09-17 01:21:23 -06001274 const auto *pipeline_exe_props_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001275 LvlFindInChain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001276 if (pipeline_exe_props_features) {
1277 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1278 }
1279
Jeff Bolz82f854d2019-09-17 14:56:47 -05001280 const auto *dedicated_allocation_image_aliasing_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001281 LvlFindInChain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
Jeff Bolz82f854d2019-09-17 14:56:47 -05001282 if (dedicated_allocation_image_aliasing_features) {
1283 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1284 *dedicated_allocation_image_aliasing_features;
1285 }
1286
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001287 const auto *performance_query_features = LvlFindInChain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001288 if (performance_query_features) {
1289 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1290 }
1291
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001292 const auto *device_coherent_memory_features = LvlFindInChain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
Tobias Hector782bcde2019-11-28 16:19:42 +00001293 if (device_coherent_memory_features) {
1294 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1295 }
1296
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001297 const auto *ycbcr_image_array_features = LvlFindInChain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungcead0802020-01-30 22:20:10 -08001298 if (ycbcr_image_array_features) {
1299 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1300 }
1301
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001302 const auto *ray_query_features = LvlFindInChain<VkPhysicalDeviceRayQueryFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001303 if (ray_query_features) {
1304 state_tracker->enabled_features.ray_query_features = *ray_query_features;
1305 }
1306
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001307 const auto *ray_tracing_pipeline_features = LvlFindInChain<VkPhysicalDeviceRayTracingPipelineFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001308 if (ray_tracing_pipeline_features) {
1309 state_tracker->enabled_features.ray_tracing_pipeline_features = *ray_tracing_pipeline_features;
1310 }
1311
1312 const auto *ray_tracing_acceleration_structure_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001313 LvlFindInChain<VkPhysicalDeviceAccelerationStructureFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001314 if (ray_tracing_acceleration_structure_features) {
1315 state_tracker->enabled_features.ray_tracing_acceleration_structure_features = *ray_tracing_acceleration_structure_features;
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001316 }
1317
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001318 const auto *robustness2_features = LvlFindInChain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz165818a2020-05-08 11:19:03 -05001319 if (robustness2_features) {
1320 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1321 }
1322
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001323 const auto *fragment_density_map_features = LvlFindInChain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001324 if (fragment_density_map_features) {
1325 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1326 }
1327
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001328 const auto *fragment_density_map_features2 = LvlFindInChain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001329 if (fragment_density_map_features2) {
1330 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1331 }
1332
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001333 const auto *astc_decode_features = LvlFindInChain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001334 if (astc_decode_features) {
1335 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1336 }
1337
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001338 const auto *custom_border_color_features = LvlFindInChain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
Tony-LunarG7337b312020-04-15 16:40:25 -06001339 if (custom_border_color_features) {
1340 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1341 }
1342
sfricke-samsungfd661d62020-05-16 00:57:27 -07001343 const auto *pipeline_creation_cache_control_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001344 LvlFindInChain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungfd661d62020-05-16 00:57:27 -07001345 if (pipeline_creation_cache_control_features) {
1346 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1347 }
1348
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001349 const auto *fragment_shading_rate_features = LvlFindInChain<VkPhysicalDeviceFragmentShadingRateFeaturesKHR>(pCreateInfo->pNext);
Tobias Hector6663c9b2020-11-05 10:18:02 +00001350 if (fragment_shading_rate_features) {
1351 state_tracker->enabled_features.fragment_shading_rate_features = *fragment_shading_rate_features;
1352 }
1353
Piers Daniell39842ee2020-07-10 16:42:33 -06001354 const auto *extended_dynamic_state_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001355 LvlFindInChain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
Piers Daniell39842ee2020-07-10 16:42:33 -06001356 if (extended_dynamic_state_features) {
1357 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1358 }
1359
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -07001360 const auto *extended_dynamic_state2_features =
1361 LvlFindInChain<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT>(pCreateInfo->pNext);
1362 if (extended_dynamic_state2_features) {
1363 state_tracker->enabled_features.extended_dynamic_state2_features = *extended_dynamic_state2_features;
1364 }
1365
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001366 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001367 if (multiview_features) {
1368 state_tracker->enabled_features.multiview_features = *multiview_features;
1369 }
1370
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001371 const auto *portability_features = LvlFindInChain<VkPhysicalDevicePortabilitySubsetFeaturesKHR>(pCreateInfo->pNext);
Nathaniel Cesariob3f2d702020-11-09 09:20:49 -07001372 if (portability_features) {
1373 state_tracker->enabled_features.portability_subset_features = *portability_features;
1374 }
1375
sfricke-samsung0065ce02020-12-03 22:46:37 -08001376 const auto *shader_integer_functions2_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001377 LvlFindInChain<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001378 if (shader_integer_functions2_features) {
1379 state_tracker->enabled_features.shader_integer_functions2_features = *shader_integer_functions2_features;
1380 }
1381
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001382 const auto *shader_sm_builtins_feature = LvlFindInChain<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001383 if (shader_sm_builtins_feature) {
1384 state_tracker->enabled_features.shader_sm_builtins_feature = *shader_sm_builtins_feature;
1385 }
1386
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001387 const auto *shader_atomic_float_feature = LvlFindInChain<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001388 if (shader_atomic_float_feature) {
1389 state_tracker->enabled_features.shader_atomic_float_feature = *shader_atomic_float_feature;
1390 }
1391
1392 const auto *shader_image_atomic_int64_feature =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001393 LvlFindInChain<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001394 if (shader_image_atomic_int64_feature) {
1395 state_tracker->enabled_features.shader_image_atomic_int64_feature = *shader_image_atomic_int64_feature;
1396 }
1397
sfricke-samsung486a51e2021-01-02 00:10:15 -08001398 const auto *shader_clock_feature = LvlFindInChain<VkPhysicalDeviceShaderClockFeaturesKHR>(pCreateInfo->pNext);
1399 if (shader_clock_feature) {
1400 state_tracker->enabled_features.shader_clock_feature = *shader_clock_feature;
1401 }
1402
Jeremy Gebben5f585ae2021-02-02 09:03:06 -07001403 const auto *conditional_rendering_features =
1404 LvlFindInChain<VkPhysicalDeviceConditionalRenderingFeaturesEXT>(pCreateInfo->pNext);
1405 if (conditional_rendering_features) {
1406 state_tracker->enabled_features.conditional_rendering = *conditional_rendering_features;
1407 }
1408
Shannon McPhersondb287d42021-02-02 15:27:32 -07001409 const auto *workgroup_memory_explicit_layout_features =
1410 LvlFindInChain<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>(pCreateInfo->pNext);
1411 if (workgroup_memory_explicit_layout_features) {
1412 state_tracker->enabled_features.workgroup_memory_explicit_layout_features = *workgroup_memory_explicit_layout_features;
1413 }
1414
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001415 const auto *synchronization2_features =
1416 LvlFindInChain<VkPhysicalDeviceSynchronization2FeaturesKHR>(pCreateInfo->pNext);
1417 if (synchronization2_features) {
1418 state_tracker->enabled_features.synchronization2_features = *synchronization2_features;
1419 }
1420
Locke Linf3873542021-04-26 11:25:10 -06001421 const auto *provoking_vertex_features = lvl_find_in_chain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(pCreateInfo->pNext);
1422 if (provoking_vertex_features) {
1423 state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
1424 }
1425
Piers Daniellcb6d8032021-04-19 18:51:26 -06001426 const auto *vertex_input_dynamic_state_features =
1427 LvlFindInChain<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1428 if (vertex_input_dynamic_state_features) {
1429 state_tracker->enabled_features.vertex_input_dynamic_state_features = *vertex_input_dynamic_state_features;
1430 }
1431
David Zhao Akeley44139b12021-04-26 16:16:13 -07001432 const auto *inherited_viewport_scissor_features =
1433 LvlFindInChain<VkPhysicalDeviceInheritedViewportScissorFeaturesNV>(pCreateInfo->pNext);
1434 if (inherited_viewport_scissor_features) {
1435 state_tracker->enabled_features.inherited_viewport_scissor_features = *inherited_viewport_scissor_features;
1436 }
1437
locke-lunargd556cc32019-09-17 01:21:23 -06001438 // Store physical device properties and physical device mem limits into CoreChecks structs
1439 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1440 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001441 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1442 &state_tracker->phys_dev_props_core11);
1443 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1444 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001445
1446 const auto &dev_ext = state_tracker->device_extensions;
1447 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1448
1449 if (dev_ext.vk_khr_push_descriptor) {
1450 // Get the needed push_descriptor limits
1451 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1452 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1453 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1454 }
1455
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001456 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001457 VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing_prop;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001458 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1459 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1460 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1461 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1462 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1463 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1464 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1465 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1466 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1467 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1468 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1469 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1470 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1471 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1472 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1473 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1474 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1475 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1476 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1477 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1478 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1479 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1480 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1481 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1482 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1483 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1484 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1485 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1486 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1487 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1488 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1489 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1490 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1491 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1492 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1493 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1494 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1495 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1496 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1497 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
1498 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
1499 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
1500 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
1501 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
1502 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
1503 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
1504 }
1505
locke-lunargd556cc32019-09-17 01:21:23 -06001506 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1507 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1508 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1509 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001510
1511 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001512 VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001513 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
1514 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
1515 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
1516 depth_stencil_resolve_props.supportedStencilResolveModes;
1517 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
1518 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
1519 }
1520
locke-lunargd556cc32019-09-17 01:21:23 -06001521 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001522 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
sourav parmarcd5fb182020-07-17 12:58:44 -07001523 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing_pipeline, &phys_dev_props->ray_tracing_propsKHR);
1524 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_acceleration_structure, &phys_dev_props->acc_structure_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001525 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1526 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001527 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001528 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07001529 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06001530 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001531 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_multiview, &phys_dev_props->multiview_props);
Nathaniel Cesario3291c912020-11-17 16:54:41 -07001532 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_portability_subset, &phys_dev_props->portability_props);
Locke Lin016d8482021-05-27 12:11:31 -06001533 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_provoking_vertex, &phys_dev_props->provoking_vertex_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001534
1535 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001536 VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001537 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
1538 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
1539 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
1540 }
1541
1542 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001543 VkPhysicalDeviceFloatControlsProperties float_controls_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001544 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
1545 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
1546 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
1547 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
1548 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
1549 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
1550 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
1551 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
1552 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
1553 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
1554 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
1555 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
1556 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
1557 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
1558 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
1559 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
1560 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
1561 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
1562 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
1563 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
1564 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
1565 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001566
locke-lunargd556cc32019-09-17 01:21:23 -06001567 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1568 // Get the needed cooperative_matrix properties
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001569 auto cooperative_matrix_props = LvlInitStruct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1570 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&cooperative_matrix_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001571 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1572 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1573
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001574 uint32_t num_cooperative_matrix_properties = 0;
1575 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties, NULL);
1576 state_tracker->cooperative_matrix_properties.resize(num_cooperative_matrix_properties,
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001577 LvlInitStruct<VkCooperativeMatrixPropertiesNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06001578
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001579 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties,
locke-lunargd556cc32019-09-17 01:21:23 -06001580 state_tracker->cooperative_matrix_properties.data());
1581 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001582 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06001583 // Get the needed subgroup limits
Locke Lin016d8482021-05-27 12:11:31 -06001584 auto subgroup_prop = LvlInitStruct<VkPhysicalDeviceSubgroupProperties>();
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001585 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&subgroup_prop);
locke-lunargd556cc32019-09-17 01:21:23 -06001586 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1587
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001588 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
1589 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
1590 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
1591 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06001592 }
1593
Tobias Hector6663c9b2020-11-05 10:18:02 +00001594 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_fragment_shading_rate, &phys_dev_props->fragment_shading_rate_props);
1595
locke-lunargd556cc32019-09-17 01:21:23 -06001596 // Store queue family data
1597 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1598 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001599 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
sfricke-samsungb585ec12021-05-06 03:10:13 -07001600 state_tracker->queue_family_index_set.insert(queue_create_info.queueFamilyIndex);
1601 state_tracker->device_queue_info_list.push_back(
1602 {i, queue_create_info.queueFamilyIndex, queue_create_info.flags, queue_create_info.queueCount});
locke-lunargd556cc32019-09-17 01:21:23 -06001603 }
1604 }
1605}
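
// Illustrative sketch (application-side, assumption): the queue create infos walked at the end of
// PostCallRecordCreateDevice above are supplied by the app at device creation, for example:
//
//     float priority = 1.0f;
//     VkDeviceQueueCreateInfo queue_ci = {VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO};
//     queue_ci.queueFamilyIndex = graphics_family_index;   // assumed to have been selected earlier
//     queue_ci.queueCount = 1;
//     queue_ci.pQueuePriorities = &priority;
//     // chained into VkDeviceCreateInfo::pQueueCreateInfos with queueCreateInfoCount = 1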
1606
1607void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
1608 if (!device) return;
1609
locke-lunargd556cc32019-09-17 01:21:23 -06001610 // Reset all command buffers before destroying them, to unlink object_bindings.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001611 for (auto &command_buffer : commandBufferMap) {
1612 ResetCommandBufferState(command_buffer.first);
locke-lunargd556cc32019-09-17 01:21:23 -06001613 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001614 pipelineMap.clear();
1615 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001616 commandBufferMap.clear();
1617
1618 // This will also delete all sets in the pool & remove them from setMap
1619 DeleteDescriptorSetPools();
1620 // All sets should be removed
1621 assert(setMap.empty());
1622 descriptorSetLayoutMap.clear();
1623 imageViewMap.clear();
1624 imageMap.clear();
1625 bufferViewMap.clear();
1626 bufferMap.clear();
1627 // Queues persist until device is destroyed
1628 queueMap.clear();
1629}
1630
locke-lunargd556cc32019-09-17 01:21:23 -06001631// Track which resources are in-flight by atomically incrementing their "in_use" count
1632void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1633 cb_node->submitCount++;
locke-lunargd556cc32019-09-17 01:21:23 -06001634
locke-lunargd556cc32019-09-17 01:21:23 -06001635    // TODO : We should be able to remove the NULL look-up checks from the code below once all the
1636    //  corresponding cases are verified to cause CB_INVALID state, and that state is then flagged
1637    //  prior to calling this function
1638 for (auto event : cb_node->writeEventsBeforeWait) {
1639 auto event_state = GetEventState(event);
1640 if (event_state) event_state->write_in_use++;
1641 }
1642}
1643
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001644void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
Jeremy Gebbencbf22862021-03-03 12:01:22 -07001645 layer_data::unordered_map<VkQueue, uint64_t> other_queue_seqs;
1646 layer_data::unordered_map<VkSemaphore, uint64_t> timeline_semaphore_counters;
locke-lunargd556cc32019-09-17 01:21:23 -06001647
1648 // Roll this queue forward, one submission at a time.
1649 while (pQueue->seq < seq) {
1650 auto &submission = pQueue->submissions.front();
1651
1652 for (auto &wait : submission.waitSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001653 auto semaphore_state = GetSemaphoreState(wait.semaphore);
1654 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001655 semaphore_state->EndUse();
locke-lunargd556cc32019-09-17 01:21:23 -06001656 }
Mike Schuchardt2df08912020-12-15 16:28:09 -08001657 if (wait.type == VK_SEMAPHORE_TYPE_TIMELINE) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001658 auto &last_counter = timeline_semaphore_counters[wait.semaphore];
1659 last_counter = std::max(last_counter, wait.payload);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001660 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001661 auto &last_seq = other_queue_seqs[wait.queue];
1662 last_seq = std::max(last_seq, wait.seq);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001663 }
locke-lunargd556cc32019-09-17 01:21:23 -06001664 }
1665
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001666 for (auto &signal : submission.signalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001667 auto semaphore_state = GetSemaphoreState(signal.semaphore);
1668 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001669 semaphore_state->EndUse();
Mike Schuchardt2df08912020-12-15 16:28:09 -08001670 if (semaphore_state->type == VK_SEMAPHORE_TYPE_TIMELINE && semaphore_state->payload < signal.payload) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001671 semaphore_state->payload = signal.payload;
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001672 }
locke-lunargd556cc32019-09-17 01:21:23 -06001673 }
1674 }
1675
1676 for (auto &semaphore : submission.externalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001677 auto semaphore_state = GetSemaphoreState(semaphore);
1678 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001679 semaphore_state->EndUse();
locke-lunargd556cc32019-09-17 01:21:23 -06001680 }
1681 }
1682
1683 for (auto cb : submission.cbs) {
1684 auto cb_node = GetCBState(cb);
1685 if (!cb_node) {
1686 continue;
1687 }
1688 // First perform decrement on general case bound objects
locke-lunargd556cc32019-09-17 01:21:23 -06001689 for (auto event : cb_node->writeEventsBeforeWait) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001690 auto event_node = eventMap.find(event);
1691 if (event_node != eventMap.end()) {
John Zulauf48057322020-12-02 11:59:31 -07001692 event_node->second->write_in_use--;
locke-lunargd556cc32019-09-17 01:21:23 -06001693 }
1694 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001695 QueryMap local_query_to_state_map;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001696 VkQueryPool first_pool = VK_NULL_HANDLE;
Jeff Bolz310775c2019-10-09 00:46:33 -05001697 for (auto &function : cb_node->queryUpdates) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001698 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
Jeff Bolz310775c2019-10-09 00:46:33 -05001699 }
1700
John Zulauf79f06582021-02-27 18:38:39 -07001701 for (const auto &query_state_pair : local_query_to_state_map) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001702 if (query_state_pair.second == QUERYSTATE_ENDED) {
1703 queryToStateMap[query_state_pair.first] = QUERYSTATE_AVAILABLE;
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001704 }
locke-lunargd556cc32019-09-17 01:21:23 -06001705 }
Jeremy Gebben5570abe2021-05-16 18:35:13 -06001706 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
1707 cb_node->EndUse();
1708 }
locke-lunargd556cc32019-09-17 01:21:23 -06001709 }
1710
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001711 auto fence_state = GetFenceState(submission.fence);
1712 if (fence_state && fence_state->scope == kSyncScopeInternal) {
1713 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06001714 }
1715
1716 pQueue->submissions.pop_front();
1717 pQueue->seq++;
1718 }
1719
1720 // Roll other queues forward to the highest seq we saw a wait for
John Zulauf79f06582021-02-27 18:38:39 -07001721 for (const auto &qs : other_queue_seqs) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001722 RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001723 }
John Zulauf79f06582021-02-27 18:38:39 -07001724 for (const auto &sc : timeline_semaphore_counters) {
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001725 RetireTimelineSemaphore(sc.first, sc.second);
1726 }
locke-lunargd556cc32019-09-17 01:21:23 -06001727}
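// Note on the retirement model used above: each QUEUE_STATE keeps a monotonically increasing
// sequence number (seq) counting submissions that have already been retired, plus a deque of
// still-pending CB_SUBMISSIONs, so the i-th pending submission corresponds to sequence number
// seq + i + 1. RetireWorkOnQueue(queue, n) pops pending submissions until seq reaches n,
// releasing the in-use counts taken at submit time and propagating retirement to any other
// queues and timeline semaphores that this queue waited on.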
1728
1729// Submit a fence to a queue, delimiting previous fences and previous untracked
1730// work by it.
1731static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1732 pFence->state = FENCE_INFLIGHT;
1733 pFence->signaler.first = pQueue->queue;
1734 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1735}
1736
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001737uint64_t ValidationStateTracker::RecordSubmitFence(QUEUE_STATE *queue_state, VkFence fence, uint32_t submit_count) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001738 auto fence_state = GetFenceState(fence);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001739 uint64_t early_retire_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001740 if (fence_state) {
1741 if (fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06001742 // Mark fence in use
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001743 SubmitFence(queue_state, fence_state, std::max(1u, submit_count));
1744 if (!submit_count) {
locke-lunargd556cc32019-09-17 01:21:23 -06001745 // If no submissions, but just dropping a fence on the end of the queue,
1746 // record an empty submission with just the fence, so we can determine
1747 // its completion.
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001748 CB_SUBMISSION submission;
1749 submission.fence = fence;
1750 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001751 }
1752 } else {
1753 // Retire work up until this fence early; we will not see the wait that corresponds to this signal
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001754 early_retire_seq = queue_state->seq + queue_state->submissions.size();
locke-lunargd556cc32019-09-17 01:21:23 -06001755 }
1756 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001757 return early_retire_seq;
1758}
1759
1760void ValidationStateTracker::RecordSubmitCommandBuffer(CB_SUBMISSION &submission, VkCommandBuffer command_buffer) {
1761 auto cb_node = GetCBState(command_buffer);
1762 if (cb_node) {
1763 submission.cbs.push_back(command_buffer);
John Zulauf79f06582021-02-27 18:38:39 -07001764 for (auto *secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06001765 submission.cbs.push_back(secondary_cmd_buffer->commandBuffer());
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001766 IncrementResources(secondary_cmd_buffer);
1767 }
1768 IncrementResources(cb_node);
Jeremy Gebben5570abe2021-05-16 18:35:13 -06001769 // increment use count for all bound objects including secondary cbs
1770 cb_node->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001771
1772 VkQueryPool first_pool = VK_NULL_HANDLE;
1773 EventToStageMap local_event_to_stage_map;
1774 QueryMap local_query_to_state_map;
1775 for (auto &function : cb_node->queryUpdates) {
1776 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
1777 }
1778
John Zulauf79f06582021-02-27 18:38:39 -07001779 for (const auto &query_state_pair : local_query_to_state_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001780 queryToStateMap[query_state_pair.first] = query_state_pair.second;
1781 }
1782
John Zulauf79f06582021-02-27 18:38:39 -07001783 for (const auto &function : cb_node->eventUpdates) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001784 function(nullptr, /*do_validate*/ false, &local_event_to_stage_map);
1785 }
1786
John Zulauf79f06582021-02-27 18:38:39 -07001787 for (const auto &eventStagePair : local_event_to_stage_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001788 eventMap[eventStagePair.first]->stageMask = eventStagePair.second;
1789 }
1790 }
1791}
1792
1793void ValidationStateTracker::RecordSubmitWaitSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
1794 uint64_t value, uint64_t next_seq) {
1795 auto semaphore_state = GetSemaphoreState(semaphore);
1796 if (semaphore_state) {
1797 if (semaphore_state->scope == kSyncScopeInternal) {
1798 SEMAPHORE_WAIT wait;
1799 wait.semaphore = semaphore;
1800 wait.type = semaphore_state->type;
1801 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
1802 if (semaphore_state->signaler.first != VK_NULL_HANDLE) {
1803 wait.queue = semaphore_state->signaler.first;
1804 wait.seq = semaphore_state->signaler.second;
1805 submission.waitSemaphores.emplace_back(std::move(wait));
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001806 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001807 }
1808 semaphore_state->signaler.first = VK_NULL_HANDLE;
1809 semaphore_state->signaled = false;
1810 } else if (semaphore_state->payload < value) {
1811 wait.queue = queue;
1812 wait.seq = next_seq;
1813 wait.payload = value;
1814 submission.waitSemaphores.emplace_back(std::move(wait));
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001815 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001816 }
1817 } else {
1818 submission.externalSemaphores.push_back(semaphore);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001819 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001820 if (semaphore_state->scope == kSyncScopeExternalTemporary) {
1821 semaphore_state->scope = kSyncScopeInternal;
1822 }
1823 }
1824 }
1825}
1826
1827bool ValidationStateTracker::RecordSubmitSignalSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
1828 uint64_t value, uint64_t next_seq) {
1829 bool retire_early = false;
1830 auto semaphore_state = GetSemaphoreState(semaphore);
1831 if (semaphore_state) {
1832 if (semaphore_state->scope == kSyncScopeInternal) {
1833 SEMAPHORE_SIGNAL signal;
1834 signal.semaphore = semaphore;
1835 signal.seq = next_seq;
1836 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
1837 semaphore_state->signaler.first = queue;
1838 semaphore_state->signaler.second = next_seq;
1839 semaphore_state->signaled = true;
1840 } else {
1841 signal.payload = value;
1842 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001843 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001844 submission.signalSemaphores.emplace_back(std::move(signal));
1845 } else {
1846 // Retire work up until this submit early; we will not see the wait that corresponds to this signal
1847 retire_early = true;
1848 }
1849 }
1850 return retire_early;
1851}
1852
1853void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
1854 VkFence fence, VkResult result) {
1855 if (result != VK_SUCCESS) return;
1856 auto queue_state = GetQueueState(queue);
1857
1858 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);
locke-lunargd556cc32019-09-17 01:21:23 -06001859
1860 // Now process each individual submit
1861 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001862 CB_SUBMISSION submission;
locke-lunargd556cc32019-09-17 01:21:23 -06001863 const VkSubmitInfo *submit = &pSubmits[submit_idx];
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001864 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001865 auto *timeline_semaphore_submit = LvlFindInChain<VkTimelineSemaphoreSubmitInfo>(submit->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001866 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
Jeremy Gebben4ae5cc72021-03-10 15:34:44 -07001867 uint64_t value = 0;
1868 if (timeline_semaphore_submit && timeline_semaphore_submit->pWaitSemaphoreValues != nullptr &&
1869 (i < timeline_semaphore_submit->waitSemaphoreValueCount)) {
1870 value = timeline_semaphore_submit->pWaitSemaphoreValues[i];
1871 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001872 RecordSubmitWaitSemaphore(submission, queue, submit->pWaitSemaphores[i], value, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001873 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001874
1875 bool retire_early = false;
locke-lunargd556cc32019-09-17 01:21:23 -06001876 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
Jeremy Gebben4ae5cc72021-03-10 15:34:44 -07001877 uint64_t value = 0;
1878 if (timeline_semaphore_submit && timeline_semaphore_submit->pSignalSemaphoreValues != nullptr &&
1879 (i < timeline_semaphore_submit->signalSemaphoreValueCount)) {
1880 value = timeline_semaphore_submit->pSignalSemaphoreValues[i];
1881 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001882 retire_early |= RecordSubmitSignalSemaphore(submission, queue, submit->pSignalSemaphores[i], value, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001883 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001884 if (retire_early) {
1885 early_retire_seq = std::max(early_retire_seq, next_seq);
1886 }
1887
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001888 const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001889 submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001890
locke-lunargd556cc32019-09-17 01:21:23 -06001891 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001892 RecordSubmitCommandBuffer(submission, submit->pCommandBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06001893 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001894 submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
1895 queue_state->submissions.emplace_back(std::move(submission));
1896 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001897
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001898 if (early_retire_seq) {
1899 RetireWorkOnQueue(queue_state, early_retire_seq);
1900 }
1901}
1902
1903void ValidationStateTracker::PostCallRecordQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
1904 VkFence fence, VkResult result) {
1905 if (result != VK_SUCCESS) return;
1906 auto queue_state = GetQueueState(queue);
1907
1908 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);
1909
1910 // Now process each individual submit
1911 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1912 CB_SUBMISSION submission;
1913 const VkSubmitInfo2KHR *submit = &pSubmits[submit_idx];
1914 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
1915 for (uint32_t i = 0; i < submit->waitSemaphoreInfoCount; ++i) {
1916 const auto &sem_info = submit->pWaitSemaphoreInfos[i];
1917 RecordSubmitWaitSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
1918 }
1919 bool retire_early = false;
1920 for (uint32_t i = 0; i < submit->signalSemaphoreInfoCount; ++i) {
1921 const auto &sem_info = submit->pSignalSemaphoreInfos[i];
1922 retire_early |= RecordSubmitSignalSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
1923 }
1924 if (retire_early) {
1925 early_retire_seq = std::max(early_retire_seq, next_seq);
1926 }
1927 const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
1928 submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;
1929
1930 for (uint32_t i = 0; i < submit->commandBufferInfoCount; i++) {
1931 RecordSubmitCommandBuffer(submission, submit->pCommandBufferInfos[i].commandBuffer);
1932 }
1933 submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
1934 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001935 }
1936
1937 if (early_retire_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001938 RetireWorkOnQueue(queue_state, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001939 }
1940}
1941
1942void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
1943 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
1944 VkResult result) {
1945 if (VK_SUCCESS == result) {
1946 AddMemObjInfo(device, *pMemory, pAllocateInfo);
1947 }
1948 return;
1949}
1950
1951void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
1952 if (!mem) return;
1953 DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
locke-lunargd556cc32019-09-17 01:21:23 -06001954 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001955 mem_info->Destroy();
John Zulauf79952712020-04-07 11:25:54 -06001956 fake_memory.Free(mem_info->fake_base_address);
locke-lunargd556cc32019-09-17 01:21:23 -06001957 memObjMap.erase(mem);
1958}
1959
1960void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
1961 VkFence fence, VkResult result) {
1962 if (result != VK_SUCCESS) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001963 auto queue_state = GetQueueState(queue);
locke-lunargd556cc32019-09-17 01:21:23 -06001964
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001965 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, bindInfoCount);
locke-lunargd556cc32019-09-17 01:21:23 -06001966
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001967 for (uint32_t bind_idx = 0; bind_idx < bindInfoCount; ++bind_idx) {
1968 const VkBindSparseInfo &bind_info = pBindInfo[bind_idx];
locke-lunargd556cc32019-09-17 01:21:23 -06001969 // Track objects tied to memory
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001970 for (uint32_t j = 0; j < bind_info.bufferBindCount; j++) {
1971 for (uint32_t k = 0; k < bind_info.pBufferBinds[j].bindCount; k++) {
1972 auto sparse_binding = bind_info.pBufferBinds[j].pBinds[k];
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001973 auto buffer_state = GetBufferState(bind_info.pBufferBinds[j].buffer);
1974 auto mem_state = GetDevMemShared(sparse_binding.memory);
1975 if (buffer_state && mem_state) {
1976 buffer_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
1977 }
locke-lunargd556cc32019-09-17 01:21:23 -06001978 }
1979 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001980 for (uint32_t j = 0; j < bind_info.imageOpaqueBindCount; j++) {
1981 for (uint32_t k = 0; k < bind_info.pImageOpaqueBinds[j].bindCount; k++) {
1982 auto sparse_binding = bind_info.pImageOpaqueBinds[j].pBinds[k];
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001983 auto image_state = GetImageState(bind_info.pImageOpaqueBinds[j].image);
1984 auto mem_state = GetDevMemShared(sparse_binding.memory);
1985 if (image_state && mem_state) {
1986 image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
1987 }
locke-lunargd556cc32019-09-17 01:21:23 -06001988 }
1989 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001990 for (uint32_t j = 0; j < bind_info.imageBindCount; j++) {
1991 for (uint32_t k = 0; k < bind_info.pImageBinds[j].bindCount; k++) {
1992 auto sparse_binding = bind_info.pImageBinds[j].pBinds[k];
locke-lunargd556cc32019-09-17 01:21:23 -06001993 // TODO: This size is broken for non-opaque bindings; need to update to handle the full sparse binding data
1994 VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001995 auto image_state = GetImageState(bind_info.pImageBinds[j].image);
1996 auto mem_state = GetDevMemShared(sparse_binding.memory);
1997 if (image_state && mem_state) {
1998 image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, size);
1999 }
locke-lunargd556cc32019-09-17 01:21:23 -06002000 }
2001 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002002 CB_SUBMISSION submission;
2003 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002004 for (uint32_t i = 0; i < bind_info.waitSemaphoreCount; ++i) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002005 RecordSubmitWaitSemaphore(submission, queue, bind_info.pWaitSemaphores[i], 0, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06002006 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002007 bool retire_early = false;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002008 for (uint32_t i = 0; i < bind_info.signalSemaphoreCount; ++i) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002009 retire_early |= RecordSubmitSignalSemaphore(submission, queue, bind_info.pSignalSemaphores[i], 0, next_seq);
2010 }
2011 // Retire work up until this submit early; we will not see the wait that corresponds to this signal
2012 if (retire_early) {
2013 early_retire_seq = std::max(early_retire_seq, queue_state->seq + queue_state->submissions.size() + 1);
locke-lunargd556cc32019-09-17 01:21:23 -06002014 }
2015
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002016 submission.fence = bind_idx == (bindInfoCount - 1) ? fence : VK_NULL_HANDLE;
2017 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06002018 }
2019
2020 if (early_retire_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002021 RetireWorkOnQueue(queue_state, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06002022 }
2023}
2024
2025void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
2026 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
2027 VkResult result) {
2028 if (VK_SUCCESS != result) return;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002029 semaphoreMap[*pSemaphore] = std::make_shared<SEMAPHORE_STATE>(*pSemaphore, LvlFindInChain<VkSemaphoreTypeCreateInfo>(pCreateInfo->pNext));
locke-lunargd556cc32019-09-17 01:21:23 -06002030}
2031
Mike Schuchardt2df08912020-12-15 16:28:09 -08002032void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBits handle_type,
2033 VkSemaphoreImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06002034 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
2035 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002036 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06002037 sema_node->scope == kSyncScopeInternal) {
2038 sema_node->scope = kSyncScopeExternalTemporary;
2039 } else {
2040 sema_node->scope = kSyncScopeExternalPermanent;
2041 }
2042 }
2043}
2044
Mike Schuchardt2df08912020-12-15 16:28:09 -08002045void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo,
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002046 VkResult result) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002047 auto *semaphore_state = GetSemaphoreState(pSignalInfo->semaphore);
2048 if (semaphore_state) semaphore_state->payload = pSignalInfo->value;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002049}
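// Illustrative sketch (not part of the tracker): the host-side signal recorded above; device and
// timeline_sem are assumed to be valid handles, with timeline_sem created as VK_SEMAPHORE_TYPE_TIMELINE.
#if 0  // example only, not compiled
static void ExampleHostTimelineSignal(VkDevice device, VkSemaphore timeline_sem) {
    VkSemaphoreSignalInfo signal_info = {VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO};
    signal_info.semaphore = timeline_sem;
    signal_info.value = 2;  // must be greater than the semaphore's current payload
    vkSignalSemaphore(device, &signal_info);  // or vkSignalSemaphoreKHR before Vulkan 1.2
}
#endif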
2050
locke-lunargd556cc32019-09-17 01:21:23 -06002051void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
2052 auto mem_info = GetDevMemState(mem);
2053 if (mem_info) {
2054 mem_info->mapped_range.offset = offset;
2055 mem_info->mapped_range.size = size;
2056 mem_info->p_driver_data = *ppData;
2057 }
2058}
2059
2060void ValidationStateTracker::RetireFence(VkFence fence) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002061 auto fence_state = GetFenceState(fence);
2062 if (fence_state && fence_state->scope == kSyncScopeInternal) {
2063 if (fence_state->signaler.first != VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06002064 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002065 RetireWorkOnQueue(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002066 } else {
2067 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
2068 // the fence as retired.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002069 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06002070 }
2071 }
2072}
2073
2074void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2075 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2076 if (VK_SUCCESS != result) return;
2077
2078 // When we know that all fences are complete we can clean/remove their CBs
2079 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2080 for (uint32_t i = 0; i < fenceCount; i++) {
2081 RetireFence(pFences[i]);
2082 }
2083 }
2084 // NOTE: The alternate case not handled here is when only some of the fences have completed. In
2085 // that case, for the app to know which fences completed, it will have to call
2086 // vkGetFenceStatus(), at which point we'll clean/remove their CBs if complete.
2087}
2088
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002089void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002090 auto semaphore_state = GetSemaphoreState(semaphore);
2091 if (semaphore_state) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002092 for (auto &pair : queueMap) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002093 QUEUE_STATE &queue_state = pair.second;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002094 uint64_t max_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002095 for (const auto &submission : queue_state.submissions) {
2096 for (const auto &signal_semaphore : submission.signalSemaphores) {
2097 if (signal_semaphore.semaphore == semaphore && signal_semaphore.payload <= until_payload) {
2098 if (signal_semaphore.seq > max_seq) {
2099 max_seq = signal_semaphore.seq;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002100 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002101 }
2102 }
2103 }
Tony-LunarG47d5e272020-04-07 15:35:55 -06002104 if (max_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002105 RetireWorkOnQueue(&queue_state, max_seq);
Tony-LunarG47d5e272020-04-07 15:35:55 -06002106 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002107 }
2108 }
2109}
2110
John Zulauff89de662020-04-13 18:57:34 -06002111void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2112 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002113 if (VK_SUCCESS != result) return;
2114
2115 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2116 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2117 }
2118}
2119
John Zulauff89de662020-04-13 18:57:34 -06002120void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2121 VkResult result) {
2122 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2123}
2124
2125void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
2126 uint64_t timeout, VkResult result) {
2127 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2128}
2129
Adrian Coca Lorentec7d76102020-09-28 13:58:16 +02002130void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2131 VkResult result) {
2132 if (VK_SUCCESS != result) return;
2133
2134 RetireTimelineSemaphore(semaphore, *pValue);
2135}
2136
2137void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2138 VkResult result) {
2139 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2140}
2141void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2142 VkResult result) {
2143 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2144}
2145
locke-lunargd556cc32019-09-17 01:21:23 -06002146void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2147 if (VK_SUCCESS != result) return;
2148 RetireFence(fence);
2149}
2150
2151void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
Jeremy Gebben5573a8c2021-06-04 08:55:10 -06002152 queueMap.emplace(queue, QUEUE_STATE(queue, queue_family_index));
locke-lunargd556cc32019-09-17 01:21:23 -06002153}
2154
2155void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
2156 VkQueue *pQueue) {
2157 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
2158}
2159
2160void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
2161 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
2162}
2163
2164void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2165 if (VK_SUCCESS != result) return;
2166 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002167 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002168}
2169
2170void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2171 if (VK_SUCCESS != result) return;
2172 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002173 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002174 }
2175}
2176
2177void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2178 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002179 auto fence_state = GetFenceState(fence);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002180 fence_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002181 fenceMap.erase(fence);
2182}
2183
2184void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2185 const VkAllocationCallbacks *pAllocator) {
2186 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002187 auto semaphore_state = GetSemaphoreState(semaphore);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002188 semaphore_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002189 semaphoreMap.erase(semaphore);
2190}
2191
2192void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2193 if (!event) return;
John Zulauf48057322020-12-02 11:59:31 -07002194 EVENT_STATE *event_state = Get<EVENT_STATE>(event);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002195 event_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002196 eventMap.erase(event);
2197}
2198
2199void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2200 const VkAllocationCallbacks *pAllocator) {
2201 if (!queryPool) return;
2202 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002203 qp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002204 queryPoolMap.erase(queryPool);
2205}
2206
locke-lunargd556cc32019-09-17 01:21:23 -06002207void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2208 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2209 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002210 // Track objects tied to memory
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002211 auto mem_state = GetDevMemShared(mem);
2212 if (mem_state) {
2213 buffer_state->SetMemBinding(mem_state, memoryOffset);
2214 }
locke-lunargd556cc32019-09-17 01:21:23 -06002215 }
2216}
2217
2218void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
2219 VkDeviceSize memoryOffset, VkResult result) {
2220 if (VK_SUCCESS != result) return;
2221 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
2222}
2223
2224void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002225 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002226 for (uint32_t i = 0; i < bindInfoCount; i++) {
2227 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2228 }
2229}
2230
2231void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002232 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002233 for (uint32_t i = 0; i < bindInfoCount; i++) {
2234 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2235 }
2236}
2237
Spencer Fricke6c127102020-04-16 06:25:20 -07002238void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002239 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2240 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002241 buffer_state->memory_requirements_checked = true;
2242 }
2243}
2244
2245void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
2246 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002247 RecordGetBufferMemoryRequirementsState(buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002248}
2249
2250void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002251 const VkBufferMemoryRequirementsInfo2 *pInfo,
2252 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002253 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002254}
2255
2256void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002257 const VkBufferMemoryRequirementsInfo2 *pInfo,
2258 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002259 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002260}
2261
Spencer Fricke6c127102020-04-16 06:25:20 -07002262void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002263 const VkImagePlaneMemoryRequirementsInfo *plane_info =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002264 (pInfo == nullptr) ? nullptr : LvlFindInChain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002265 IMAGE_STATE *image_state = GetImageState(image);
2266 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002267 if (plane_info != nullptr) {
2268 // Multi-plane image
2269 image_state->memory_requirements_checked = false; // Each image plane needs to be checked itself
2270 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
2271 image_state->plane0_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002272 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
2273 image_state->plane1_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002274 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
2275 image_state->plane2_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002276 }
2277 } else {
2278 // Single Plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002279 image_state->memory_requirements_checked = true;
2280 }
locke-lunargd556cc32019-09-17 01:21:23 -06002281 }
2282}
2283
2284void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
2285 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002286 RecordGetImageMemoryRequirementsState(image, nullptr);
locke-lunargd556cc32019-09-17 01:21:23 -06002287}
2288
2289void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2290 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002291 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002292}
2293
2294void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
2295 const VkImageMemoryRequirementsInfo2 *pInfo,
2296 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002297 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002298}
2299
2300static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2301 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2302 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2303 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2304 image_state->sparse_metadata_required = true;
2305 }
2306}
2307
2308void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2309 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2310 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2311 auto image_state = GetImageState(image);
2312 image_state->get_sparse_reqs_called = true;
2313 if (!pSparseMemoryRequirements) return;
2314 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2315 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2316 }
2317}
2318
2319void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002320 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2321 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002322 auto image_state = GetImageState(pInfo->image);
2323 image_state->get_sparse_reqs_called = true;
2324 if (!pSparseMemoryRequirements) return;
2325 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2326 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2327 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2328 }
2329}
2330
2331void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002332 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2333 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002334 auto image_state = GetImageState(pInfo->image);
2335 image_state->get_sparse_reqs_called = true;
2336 if (!pSparseMemoryRequirements) return;
2337 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2338 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2339 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2340 }
2341}
2342
2343void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2344 const VkAllocationCallbacks *pAllocator) {
2345 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002346 auto shader_module_state = GetShaderModuleState(shaderModule);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002347 shader_module_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002348 shaderModuleMap.erase(shaderModule);
2349}
2350
2351void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2352 const VkAllocationCallbacks *pAllocator) {
2353 if (!pipeline) return;
2354 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
locke-lunargd556cc32019-09-17 01:21:23 -06002355 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002356 pipeline_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002357 pipelineMap.erase(pipeline);
2358}
2359
2360void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2361 const VkAllocationCallbacks *pAllocator) {
2362 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002363 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002364 pipeline_layout_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002365 pipelineLayoutMap.erase(pipelineLayout);
2366}
2367
2368void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2369 const VkAllocationCallbacks *pAllocator) {
2370 if (!sampler) return;
2371 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
locke-lunargd556cc32019-09-17 01:21:23 -06002372 // Any bound cmd buffers are now invalid
2373 if (sampler_state) {
Yuly Novikov424cdd52020-05-26 16:45:12 -04002374 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2375 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
2376 custom_border_color_sampler_count--;
2377 }
2378
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002379 sampler_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002380 }
2381 samplerMap.erase(sampler);
2382}
2383
2384void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2385 const VkAllocationCallbacks *pAllocator) {
2386 if (!descriptorSetLayout) return;
2387 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2388 if (layout_it != descriptorSetLayoutMap.end()) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002389 layout_it->second.get()->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002390 descriptorSetLayoutMap.erase(layout_it);
2391 }
2392}
2393
2394void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2395 const VkAllocationCallbacks *pAllocator) {
2396 if (!descriptorPool) return;
2397 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002398 if (desc_pool_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002399 // Free sets that were in this pool
John Zulauf79f06582021-02-27 18:38:39 -07002400 for (auto *ds : desc_pool_state->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002401 FreeDescriptorSet(ds);
2402 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002403 desc_pool_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002404 descriptorPoolMap.erase(descriptorPool);
2405 }
2406}
2407
2 2408// Free all command buffers in the given list, removing all references/links to them using ResetCommandBufferState
2409void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
2410 const VkCommandBuffer *command_buffers) {
2411 for (uint32_t i = 0; i < command_buffer_count; i++) {
John Zulaufd1f85d42020-04-15 12:23:15 -06002412 // Allow any derived class to clean up command buffer state
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002413 if (command_buffer_reset_callback) {
2414 (*command_buffer_reset_callback)(command_buffers[i]);
2415 }
John Zulaufd1f85d42020-04-15 12:23:15 -06002416 if (command_buffer_free_callback) {
2417 (*command_buffer_free_callback)(command_buffers[i]);
2418 }
2419
locke-lunargd556cc32019-09-17 01:21:23 -06002420 auto cb_state = GetCBState(command_buffers[i]);
2421 // Remove references to command buffer's state and delete
2422 if (cb_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002423 // Remove the cb_state's references from COMMAND_POOL_STATEs
2424 pool_state->commandBuffers.erase(command_buffers[i]);
2425 // Remove the cb debug labels
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06002426 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
locke-lunargd556cc32019-09-17 01:21:23 -06002427 // Remove CBState from CB map
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002428 cb_state->Destroy();
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06002429 commandBufferMap.erase(cb_state->commandBuffer());
locke-lunargd556cc32019-09-17 01:21:23 -06002430 }
2431 }
2432}
2433
2434void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2435 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002436 auto pool = GetCommandPoolState(commandPool);
2437 FreeCommandBufferStates(pool, commandBufferCount, pCommandBuffers);
locke-lunargd556cc32019-09-17 01:21:23 -06002438}
2439
2440void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2441 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2442 VkResult result) {
2443 if (VK_SUCCESS != result) return;
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06002444 auto queue_flags = GetPhysicalDeviceState()->queue_family_properties[pCreateInfo->queueFamilyIndex].queueFlags;
2445 commandPoolMap[*pCommandPool] = std::make_shared<COMMAND_POOL_STATE>(*pCommandPool, pCreateInfo, queue_flags);
locke-lunargd556cc32019-09-17 01:21:23 -06002446}
2447
2448void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2449 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2450 VkResult result) {
2451 if (VK_SUCCESS != result) return;
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002452
2453 uint32_t index_count = 0, n_perf_pass = 0;
2454 bool has_cb = false, has_rb = false;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002455 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002456 const auto *perf = LvlFindInChain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002457 index_count = perf->counterIndexCount;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002458
Mark Lobodzinski7e948e42020-09-09 10:23:36 -06002459 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002460 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2461 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2462 switch (counter.scope) {
2463 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002464 has_cb = true;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002465 break;
2466 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002467 has_rb = true;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002468 break;
2469 default:
2470 break;
2471 }
2472 }
2473
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002474 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf, &n_perf_pass);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002475 }
2476
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002477 queryPoolMap[*pQueryPool] =
2478 std::make_shared<QUERY_POOL_STATE>(*pQueryPool, pCreateInfo, index_count, n_perf_pass, has_cb, has_rb);
locke-lunargd556cc32019-09-17 01:21:23 -06002479
2480 QueryObject query_obj{*pQueryPool, 0u};
2481 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2482 query_obj.query = i;
2483 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2484 }
2485}
2486
2487void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2488 const VkAllocationCallbacks *pAllocator) {
2489 if (!commandPool) return;
2490 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2491 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2492 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2493 if (cp_state) {
2494 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2495 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2496 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002497 cp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002498 commandPoolMap.erase(commandPool);
2499 }
2500}
2501
2502void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2503 VkCommandPoolResetFlags flags, VkResult result) {
2504 if (VK_SUCCESS != result) return;
2505 // Reset all of the CBs allocated from this pool
2506 auto command_pool_state = GetCommandPoolState(commandPool);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002507 for (auto cmd_buffer : command_pool_state->commandBuffers) {
2508 ResetCommandBufferState(cmd_buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002509 }
2510}
2511
2512void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2513 VkResult result) {
2514 for (uint32_t i = 0; i < fenceCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002515 auto fence_state = GetFenceState(pFences[i]);
2516 if (fence_state) {
2517 if (fence_state->scope == kSyncScopeInternal) {
2518 fence_state->state = FENCE_UNSIGNALED;
2519 } else if (fence_state->scope == kSyncScopeExternalTemporary) {
2520 fence_state->scope = kSyncScopeInternal;
locke-lunargd556cc32019-09-17 01:21:23 -06002521 }
2522 }
2523 }
2524}
2525
locke-lunargd556cc32019-09-17 01:21:23 -06002526void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2527 const VkAllocationCallbacks *pAllocator) {
2528 if (!framebuffer) return;
2529 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002530 framebuffer_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002531 frameBufferMap.erase(framebuffer);
2532}
2533
2534void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2535 const VkAllocationCallbacks *pAllocator) {
2536 if (!renderPass) return;
2537 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002538 rp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002539 renderPassMap.erase(renderPass);
2540}
2541
2542void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2543 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2544 if (VK_SUCCESS != result) return;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002545 fenceMap[*pFence] = std::make_shared<FENCE_STATE>(*pFence, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002546}
2547
2548bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2549 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2550 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002551 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002552 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2553 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2554 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2555 cgpl_state->pipe_state.reserve(count);
2556 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002557 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002558 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002559 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002560 }
2561 return false;
2562}
2563
2564void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2565 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2566 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2567 VkResult result, void *cgpl_state_data) {
2568 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2569 // This API may create pipelines regardless of the return value
2570 for (uint32_t i = 0; i < count; i++) {
2571 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002572 (cgpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002573 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2574 }
2575 }
2576 cgpl_state->pipe_state.clear();
2577}
2578
2579bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2580 const VkComputePipelineCreateInfo *pCreateInfos,
2581 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002582 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002583 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2584 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2585 ccpl_state->pipe_state.reserve(count);
2586 for (uint32_t i = 0; i < count; i++) {
2587 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002588 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002589 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002590 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002591 }
2592 return false;
2593}
2594
2595void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2596 const VkComputePipelineCreateInfo *pCreateInfos,
2597 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2598 VkResult result, void *ccpl_state_data) {
2599 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2600
2601 // This API may create pipelines regardless of the return value
2602 for (uint32_t i = 0; i < count; i++) {
2603 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002604 (ccpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002605 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2606 }
2607 }
2608 ccpl_state->pipe_state.clear();
2609}
2610
2611bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2612 uint32_t count,
2613 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2614 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002615 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002616 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2617 crtpl_state->pipe_state.reserve(count);
2618 for (uint32_t i = 0; i < count; i++) {
2619 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002620 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002621 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002622 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002623 }
2624 return false;
2625}
2626
2627void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2628 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2629 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2630 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2631 // This API may create pipelines regardless of the return value
2632 for (uint32_t i = 0; i < count; i++) {
2633 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002634 (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002635 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2636 }
2637 }
2638 crtpl_state->pipe_state.clear();
2639}
2640
sourav parmarcd5fb182020-07-17 12:58:44 -07002641bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
2642 VkPipelineCache pipelineCache, uint32_t count,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002643 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2644 const VkAllocationCallbacks *pAllocator,
2645 VkPipeline *pPipelines, void *crtpl_state_data) const {
2646 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2647 crtpl_state->pipe_state.reserve(count);
2648 for (uint32_t i = 0; i < count; i++) {
2649 // Create and initialize internal tracking data structure
2650 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
2651 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
2652 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
2653 }
2654 return false;
2655}
2656
sourav parmarcd5fb182020-07-17 12:58:44 -07002657void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
2658 VkPipelineCache pipelineCache, uint32_t count,
2659 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2660 const VkAllocationCallbacks *pAllocator,
2661 VkPipeline *pPipelines, VkResult result,
2662 void *crtpl_state_data) {
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002663 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2664 // This API may create pipelines regardless of the return value
2665 for (uint32_t i = 0; i < count; i++) {
2666 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002667 (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002668 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2669 }
2670 }
2671 crtpl_state->pipe_state.clear();
2672}
2673
locke-lunargd556cc32019-09-17 01:21:23 -06002674void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2675 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2676 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002677 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
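    // Track the number of samplers created with a custom border color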
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002678 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2679 pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
Tony-LunarG7337b312020-04-15 16:40:25 -06002680 custom_border_color_sampler_count++;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002681 }
locke-lunargd556cc32019-09-17 01:21:23 -06002682}
2683
2684void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2685 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2686 const VkAllocationCallbacks *pAllocator,
2687 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2688 if (VK_SUCCESS != result) return;
2689 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2690}
2691
2692// For repeatable sorting; not very useful for a "memory in range" search
2693struct PushConstantRangeCompare {
2694 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2695 if (lhs->offset == rhs->offset) {
2696 if (lhs->size == rhs->size) {
2697 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2698 return lhs->stageFlags < rhs->stageFlags;
2699 }
2700 // If the offsets are the same then sorting by the end of range is useful for validation
2701 return lhs->size < rhs->size;
2702 }
2703 return lhs->offset < rhs->offset;
2704 }
2705};
2706
2707static PushConstantRangesDict push_constant_ranges_dict;
2708
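// Return the canonical id for a pipeline layout's push constant ranges; the ranges are sorted first so that
// layouts with equivalent ranges map to the same id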
2709PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2710 if (!info->pPushConstantRanges) {
2711 // Hand back the empty entry (creating as needed)...
2712 return push_constant_ranges_dict.look_up(PushConstantRanges());
2713 }
2714
2715 // Sort the input ranges to ensure equivalent ranges map to the same id
2716 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2717 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2718 sorted.insert(info->pPushConstantRanges + i);
2719 }
2720
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07002721 PushConstantRanges ranges;
2722 ranges.reserve(sorted.size());
John Zulauf79f06582021-02-27 18:38:39 -07002723 for (const auto *range : sorted) {
locke-lunargd556cc32019-09-17 01:21:23 -06002724 ranges.emplace_back(*range);
2725 }
2726 return push_constant_ranges_dict.look_up(std::move(ranges));
2727}
2728
2729// Dictionary of canonical forms of a pipeline layout's list of descriptor set layouts
2730static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2731
2732// Dictionary of canonical form of the "compatible for set" records
2733static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2734
2735static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2736 const PipelineLayoutSetLayoutsId set_layouts_id) {
2737 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2738}
2739
2740void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2741 const VkAllocationCallbacks *pAllocator,
2742 VkPipelineLayout *pPipelineLayout, VkResult result) {
2743 if (VK_SUCCESS != result) return;
2744
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002745 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>(*pPipelineLayout);
locke-lunargd556cc32019-09-17 01:21:23 -06002746 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2747 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2748 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002749 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002750 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2751 }
2752
2753 // Get canonical form IDs for the "compatible for set" contents
2754 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2755 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2756 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2757
2758    // Create table of "compatible for set N" canonical forms for trivial accept validation
2759 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2760 pipeline_layout_state->compat_for_set.emplace_back(
2761 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2762 }
2763 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2764}
2765
2766void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2767 const VkAllocationCallbacks *pAllocator,
2768 VkDescriptorPool *pDescriptorPool, VkResult result) {
2769 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002770 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002771}
2772
2773void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2774 VkDescriptorPoolResetFlags flags, VkResult result) {
2775 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002776 DESCRIPTOR_POOL_STATE *pool = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002777 // TODO: validate flags
2778    // For every set from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
John Zulauf79f06582021-02-27 18:38:39 -07002779 for (auto *ds : pool->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002780 FreeDescriptorSet(ds);
2781 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002782 pool->sets.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06002783 // Reset available count for each type and available sets for this pool
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002784 for (auto it = pool->availableDescriptorTypeCount.begin(); it != pool->availableDescriptorTypeCount.end(); ++it) {
2785 pool->availableDescriptorTypeCount[it->first] = pool->maxDescriptorTypeCount[it->first];
locke-lunargd556cc32019-09-17 01:21:23 -06002786 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002787 pool->availableSets = pool->maxSets;
locke-lunargd556cc32019-09-17 01:21:23 -06002788}
2789
2790bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2791 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002792 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002793 // Always update common data
2794 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2795 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2796 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2797
2798 return false;
2799}
2800
2801// Allocation state was good and the call down the chain was made, so update state based on the allocated descriptor sets
2802void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2803 VkDescriptorSet *pDescriptorSets, VkResult result,
2804 void *ads_state_data) {
2805 if (VK_SUCCESS != result) return;
2806 // All the updates are contained in a single cvdescriptorset function
2807 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2808 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2809 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2810}
2811
2812void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2813 const VkDescriptorSet *pDescriptorSets) {
2814 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2815 // Update available descriptor sets in pool
2816 pool_state->availableSets += count;
2817
2818    // For each freed descriptor set, add its resources back into the pool as available and remove it from the pool and setMap
2819 for (uint32_t i = 0; i < count; ++i) {
2820 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2821 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2822 uint32_t type_index = 0, descriptor_count = 0;
2823 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2824 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2825 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2826 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2827 }
2828 FreeDescriptorSet(descriptor_set);
2829 pool_state->sets.erase(descriptor_set);
2830 }
2831 }
2832}
2833
2834void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2835 const VkWriteDescriptorSet *pDescriptorWrites,
2836 uint32_t descriptorCopyCount,
2837 const VkCopyDescriptorSet *pDescriptorCopies) {
2838 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2839 pDescriptorCopies);
2840}
2841
2842void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2843 VkCommandBuffer *pCommandBuffer, VkResult result) {
2844 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002845 auto pool = GetCommandPoolShared(pCreateInfo->commandPool);
2846 if (pool) {
locke-lunargd556cc32019-09-17 01:21:23 -06002847 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2848 // Add command buffer to its commandPool map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002849 pool->commandBuffers.insert(pCommandBuffer[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002850 auto cb_state = std::make_shared<CMD_BUFFER_STATE>(pCommandBuffer[i], pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002851 cb_state->command_pool = pool;
2852 cb_state->unprotected = pool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06002853 // Add command buffer to map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002854 commandBufferMap[pCommandBuffer[i]] = std::move(cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002855 ResetCommandBufferState(pCommandBuffer[i]);
2856 }
2857 }
2858}
2859
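// Mark each framebuffer attachment referenced by this subpass as used and record its usage and layout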
locke-lunargfc78e932020-11-19 17:06:24 -07002860void UpdateSubpassAttachments(const safe_VkSubpassDescription2 &subpass, std::vector<SUBPASS_INFO> &subpasses) {
2861 for (uint32_t index = 0; index < subpass.inputAttachmentCount; ++index) {
2862 const uint32_t attachment_index = subpass.pInputAttachments[index].attachment;
2863 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2864 subpasses[attachment_index].used = true;
2865 subpasses[attachment_index].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
2866 subpasses[attachment_index].layout = subpass.pInputAttachments[index].layout;
2867 }
2868 }
2869
2870 for (uint32_t index = 0; index < subpass.colorAttachmentCount; ++index) {
2871 const uint32_t attachment_index = subpass.pColorAttachments[index].attachment;
2872 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2873 subpasses[attachment_index].used = true;
2874 subpasses[attachment_index].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2875 subpasses[attachment_index].layout = subpass.pColorAttachments[index].layout;
2876 }
2877 if (subpass.pResolveAttachments) {
2878 const uint32_t attachment_index2 = subpass.pResolveAttachments[index].attachment;
2879 if (attachment_index2 != VK_ATTACHMENT_UNUSED) {
2880 subpasses[attachment_index2].used = true;
2881 subpasses[attachment_index2].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2882 subpasses[attachment_index2].layout = subpass.pResolveAttachments[index].layout;
2883 }
2884 }
2885 }
2886
2887 if (subpass.pDepthStencilAttachment) {
2888 const uint32_t attachment_index = subpass.pDepthStencilAttachment->attachment;
2889 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2890 subpasses[attachment_index].used = true;
2891 subpasses[attachment_index].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
2892 subpasses[attachment_index].layout = subpass.pDepthStencilAttachment->layout;
2893 }
2894 }
2895}
2896
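// Resolve the command buffer's active attachment views, either from VkRenderPassAttachmentBeginInfo (imageless
// framebuffers) or from the framebuffer's own attachment view state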
2897void UpdateAttachmentsView(ValidationStateTracker &tracker, CMD_BUFFER_STATE &cb_state, const FRAMEBUFFER_STATE &framebuffer,
2898 const VkRenderPassBeginInfo *pRenderPassBegin) {
2899 auto &attachments = *(cb_state.active_attachments.get());
2900 const bool imageless = (framebuffer.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) ? true : false;
2901 const VkRenderPassAttachmentBeginInfo *attachment_info_struct = nullptr;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002902 if (pRenderPassBegin) attachment_info_struct = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
locke-lunargfc78e932020-11-19 17:06:24 -07002903
2904 for (uint32_t i = 0; i < attachments.size(); ++i) {
2905 if (imageless) {
2906 if (attachment_info_struct && i < attachment_info_struct->attachmentCount) {
2907 auto res = cb_state.attachments_view_states.insert(
2908 tracker.GetShared<IMAGE_VIEW_STATE>(attachment_info_struct->pAttachments[i]));
2909 attachments[i] = res.first->get();
2910 }
2911 } else {
2912 auto res = cb_state.attachments_view_states.insert(framebuffer.attachments_view_state[i]);
2913 attachments[i] = res.first->get();
2914 }
2915 }
2916}
2917
locke-lunargd556cc32019-09-17 01:21:23 -06002918void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
2919 const VkCommandBufferBeginInfo *pBeginInfo) {
2920 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2921 if (!cb_state) return;
locke-lunargfc78e932020-11-19 17:06:24 -07002922
locke-lunargd556cc32019-09-17 01:21:23 -06002923 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
2924 ResetCommandBufferState(commandBuffer);
2925 }
2926 // Set updated state here in case implicit reset occurs above
2927 cb_state->state = CB_RECORDING;
2928 cb_state->beginInfo = *pBeginInfo;
Tony-LunarG3c287f62020-12-17 12:39:49 -07002929 if (cb_state->beginInfo.pInheritanceInfo && (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
locke-lunargd556cc32019-09-17 01:21:23 -06002930 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
2931 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
2932        // If we are a secondary command buffer and inheriting, update the items we should inherit.
2933 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
2934 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
locke-lunargaecf2152020-05-12 17:15:41 -06002935 cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
locke-lunargd556cc32019-09-17 01:21:23 -06002936 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
locke-lunargfc78e932020-11-19 17:06:24 -07002937
locke-lunargaecf2152020-05-12 17:15:41 -06002938 if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
2939 cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
locke-lunargfc78e932020-11-19 17:06:24 -07002940 cb_state->active_subpasses = nullptr;
2941 cb_state->active_attachments = nullptr;
2942
2943 if (cb_state->activeFramebuffer) {
2944 cb_state->framebuffers.insert(cb_state->activeFramebuffer);
2945
2946 // Set cb_state->active_subpasses
2947 cb_state->active_subpasses =
2948 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
2949 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
2950 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
2951
2952 // Set cb_state->active_attachments & cb_state->attachments_view_states
2953 cb_state->active_attachments =
2954 std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
2955 UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, nullptr);
2956
2957 // Connect this framebuffer and its children to this cmdBuffer
Jeremy Gebben5570abe2021-05-16 18:35:13 -06002958 if (!disabled[command_buffer_state]) {
2959 cb_state->AddChild(cb_state->activeFramebuffer.get());
2960 }
locke-lunargfc78e932020-11-19 17:06:24 -07002961 }
locke-lunargaecf2152020-05-12 17:15:41 -06002962 }
David Zhao Akeley44139b12021-04-26 16:16:13 -07002963
2964 // Check for VkCommandBufferInheritanceViewportScissorInfoNV (VK_NV_inherited_viewport_scissor)
2965 auto p_inherited_viewport_scissor_info =
2966 LvlFindInChain<VkCommandBufferInheritanceViewportScissorInfoNV>(cb_state->beginInfo.pInheritanceInfo->pNext);
2967 if (p_inherited_viewport_scissor_info != nullptr && p_inherited_viewport_scissor_info->viewportScissor2D) {
2968 auto pViewportDepths = p_inherited_viewport_scissor_info->pViewportDepths;
2969 cb_state->inheritedViewportDepths.assign(
2970 pViewportDepths, pViewportDepths + p_inherited_viewport_scissor_info->viewportDepthCount);
2971 }
locke-lunargd556cc32019-09-17 01:21:23 -06002972 }
2973 }
2974
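    // The initial device mask comes from VkDeviceGroupCommandBufferBeginInfo if chained, otherwise it covers every
    // physical device in the group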
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002975 auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002976 if (chained_device_group_struct) {
2977 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
2978 } else {
2979 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
2980 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002981
2982 cb_state->performance_lock_acquired = performance_lock_acquired;
locke-lunargd556cc32019-09-17 01:21:23 -06002983}
2984
2985void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2986 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2987 if (!cb_state) return;
2988 // Cached validation is specific to a specific recording of a specific command buffer.
John Zulauf79f06582021-02-27 18:38:39 -07002989 for (auto *descriptor_set : cb_state->validated_descriptor_sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002990 descriptor_set->ClearCachedValidation(cb_state);
2991 }
2992 cb_state->validated_descriptor_sets.clear();
2993 if (VK_SUCCESS == result) {
2994 cb_state->state = CB_RECORDED;
2995 }
2996}
2997
2998void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
2999 VkResult result) {
3000 if (VK_SUCCESS == result) {
3001 ResetCommandBufferState(commandBuffer);
3002 }
3003}
3004
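// Start with every state bit marked static, then clear the bit for each state the pipeline declares as dynamic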
3005CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
3006 // initially assume everything is static state
3007 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
3008
3009 if (ds) {
3010 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
locke-lunarg4189aa22020-10-21 00:23:48 -06003011 flags &= ~ConvertToCBStatusFlagBits(ds->pDynamicStates[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003012 }
3013 }
locke-lunargd556cc32019-09-17 01:21:23 -06003014 return flags;
3015}
3016
3017// Validation cache:
3018// CV is the bottommost implementor of this extension. Don't pass calls down.
3019// Utility function to set collective state for a pipeline
3020void SetPipelineState(PIPELINE_STATE *pPipe) {
3021 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
3022 if (pPipe->graphicsPipelineCI.pColorBlendState) {
3023 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
3024 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
3025 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3026 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3027 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3028 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3029 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3030 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3031 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3032 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
3033 pPipe->blendConstantsEnabled = true;
3034 }
3035 }
3036 }
3037 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07003038 // Check if sample location is enabled
3039 if (pPipe->graphicsPipelineCI.pMultisampleState) {
3040 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07003041 LvlFindInChain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
sfricke-samsung8f658d42020-05-03 20:12:24 -07003042 if (sample_location_state != nullptr) {
3043 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
3044 }
3045 }
locke-lunargd556cc32019-09-17 01:21:23 -06003046}
3047
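// Track the newly bound pipeline and, for the graphics bind point, refresh the static/dynamic state and
// viewport/scissor bookkeeping derived from its create info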
3048void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
3049 VkPipeline pipeline) {
3050 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3051 assert(cb_state);
3052
3053 auto pipe_state = GetPipelineState(pipeline);
3054 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003055 bool rasterization_enabled = VK_FALSE == pipe_state->graphicsPipelineCI.ptr()->pRasterizationState->rasterizerDiscardEnable;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003056 const auto* viewport_state = pipe_state->graphicsPipelineCI.ptr()->pViewportState;
3057 const auto* dynamic_state = pipe_state->graphicsPipelineCI.ptr()->pDynamicState;
locke-lunargd556cc32019-09-17 01:21:23 -06003058 cb_state->status &= ~cb_state->static_status;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003059 cb_state->static_status = MakeStaticStateMask(dynamic_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003060 cb_state->status |= cb_state->static_status;
locke-lunarg4189aa22020-10-21 00:23:48 -06003061 cb_state->dynamic_status = CBSTATUS_ALL_STATE_SET & (~cb_state->static_status);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003062
3063 // Used to calculate CMD_BUFFER_STATE::usedViewportScissorCount upon draw command with this graphics pipeline.
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003064        // If rasterization is disabled (no viewports/scissors are used), or the actual number of viewports/scissors is dynamic (unknown at
3065 // this time), then these are set to 0 to disable this checking.
David Zhao Akeley44139b12021-04-26 16:16:13 -07003066 auto has_dynamic_viewport_count = cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003067 auto has_dynamic_scissor_count = cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003068 cb_state->pipelineStaticViewportCount =
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003069 has_dynamic_viewport_count || !rasterization_enabled ? 0 : viewport_state->viewportCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003070 cb_state->pipelineStaticScissorCount =
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003071 has_dynamic_scissor_count || !rasterization_enabled ? 0 : viewport_state->scissorCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003072
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003073        // Trash dynamic viewport/scissor state if the pipeline defines static state and rasterization is enabled.
David Zhao Akeley44139b12021-04-26 16:16:13 -07003074        // akeley98 NOTE: There's a bit of ambiguity in the spec as to whether binding such a pipeline overwrites
3075 // the entire viewport (scissor) array, or only the subsection defined by the viewport (scissor) count.
3076 // I am taking the latter interpretation based on the implementation details of NVIDIA's Vulkan driver.
David Zhao Akeley44139b12021-04-26 16:16:13 -07003077 if (!has_dynamic_viewport_count) {
3078 cb_state->trashedViewportCount = true;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003079 if (rasterization_enabled && (cb_state->static_status & CBSTATUS_VIEWPORT_SET)) {
David Zhao Akeley44139b12021-04-26 16:16:13 -07003080 cb_state->trashedViewportMask |= (uint32_t(1) << viewport_state->viewportCount) - 1u;
3081 // should become = ~uint32_t(0) if the other interpretation is correct.
3082 }
3083 }
3084 if (!has_dynamic_scissor_count) {
3085 cb_state->trashedScissorCount = true;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003086 if (rasterization_enabled && (cb_state->static_status & CBSTATUS_SCISSOR_SET)) {
David Zhao Akeley44139b12021-04-26 16:16:13 -07003087 cb_state->trashedScissorMask |= (uint32_t(1) << viewport_state->scissorCount) - 1u;
3088 // should become = ~uint32_t(0) if the other interpretation is correct.
3089 }
3090 }
locke-lunargd556cc32019-09-17 01:21:23 -06003091 }
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003092 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3093 cb_state->lastBound[lv_bind_point].pipeline_state = pipe_state;
locke-lunargd556cc32019-09-17 01:21:23 -06003094 SetPipelineState(pipe_state);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003095 if (!disabled[command_buffer_state]) {
3096 cb_state->AddChild(pipe_state);
3097 }
locke-lunargb8be8222020-10-20 00:34:37 -06003098 for (auto &slot : pipe_state->active_slots) {
3099 for (auto &req : slot.second) {
3100 for (auto &sampler : req.second.samplers_used_by_image) {
3101 for (auto &des : sampler) {
3102 des.second = nullptr;
3103 }
3104 }
3105 }
3106 }
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06003107 cb_state->lastBound[lv_bind_point].UpdateSamplerDescriptorsUsedByImage();
locke-lunargd556cc32019-09-17 01:21:23 -06003108}
3109
3110void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3111 uint32_t viewportCount, const VkViewport *pViewports) {
3112 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
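    // Mark the viewports written by this command as set and no longer trashed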
David Zhao Akeley44139b12021-04-26 16:16:13 -07003113 uint32_t bits = ((1u << viewportCount) - 1u) << firstViewport;
3114 cb_state->viewportMask |= bits;
3115 cb_state->trashedViewportMask &= ~bits;
locke-lunargd556cc32019-09-17 01:21:23 -06003116 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003117 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003118
3119 cb_state->dynamicViewports.resize(std::max(size_t(firstViewport + viewportCount), cb_state->dynamicViewports.size()));
3120 for (size_t i = 0; i < viewportCount; ++i) {
3121 cb_state->dynamicViewports[firstViewport + i] = pViewports[i];
3122 }
locke-lunargd556cc32019-09-17 01:21:23 -06003123}
3124
3125void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3126 uint32_t exclusiveScissorCount,
3127 const VkRect2D *pExclusiveScissors) {
3128 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3129 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3130 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3131 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003132 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003133}
3134
3135void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3136 VkImageLayout imageLayout) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003137 if (disabled[command_buffer_state]) return;
3138
locke-lunargd556cc32019-09-17 01:21:23 -06003139 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3140
3141 if (imageView != VK_NULL_HANDLE) {
3142 auto view_state = GetImageViewState(imageView);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003143 cb_state->AddChild(view_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003144 }
3145}
3146
3147void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3148 uint32_t viewportCount,
3149 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3150 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3151 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3152 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3153 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003154 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003155}
3156
3157void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3158 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3159 const VkAllocationCallbacks *pAllocator,
3160 VkAccelerationStructureNV *pAccelerationStructure,
3161 VkResult result) {
3162 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003163 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003164
3165 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003166 auto as_memory_requirements_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003167 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003168 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003169 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3170
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003171 auto scratch_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003172 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003173 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003174 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3175 &as_state->build_scratch_memory_requirements);
3176
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003177 auto update_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003178 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003179 update_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003180 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3181 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003182 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003183 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3184}
3185
Jeff Bolz95176d02020-04-01 00:36:16 -05003186void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3187 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3188 const VkAllocationCallbacks *pAllocator,
3189 VkAccelerationStructureKHR *pAccelerationStructure,
3190 VkResult result) {
3191 if (VK_SUCCESS != result) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003192 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE_KHR>(*pAccelerationStructure, pCreateInfo);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003193 as_state->allocator = pAllocator;
sourav parmarcd5fb182020-07-17 12:58:44 -07003194 accelerationStructureMap_khr[*pAccelerationStructure] = std::move(as_state);
Jeff Bolz95176d02020-04-01 00:36:16 -05003195}
3196
sourav parmarcd5fb182020-07-17 12:58:44 -07003197void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresKHR(
3198 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3199 const VkAccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos) {
3200 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3201 if (cb_state == nullptr) {
3202 return;
3203 }
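    // Record build info on each destination acceleration structure and link the src/dst structures to this command buffer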
3204 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003205 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003206 if (dst_as_state != nullptr) {
3207 dst_as_state->built = true;
3208 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003209 if (!disabled[command_buffer_state]) {
3210 cb_state->AddChild(dst_as_state);
3211 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003212 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003213 if (!disabled[command_buffer_state]) {
3214 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3215 if (src_as_state != nullptr) {
3216 cb_state->AddChild(src_as_state);
3217 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003218 }
3219 }
3220 cb_state->hasBuildAccelerationStructureCmd = true;
3221}
3222
3223void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresIndirectKHR(
3224 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3225 const VkDeviceAddress *pIndirectDeviceAddresses, const uint32_t *pIndirectStrides,
3226 const uint32_t *const *ppMaxPrimitiveCounts) {
3227 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3228 if (cb_state == nullptr) {
3229 return;
3230 }
3231 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003232 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003233 if (dst_as_state != nullptr) {
3234 dst_as_state->built = true;
3235 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003236 if (!disabled[command_buffer_state]) {
3237 cb_state->AddChild(dst_as_state);
3238 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003239 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003240 if (!disabled[command_buffer_state]) {
3241 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3242 if (src_as_state != nullptr) {
3243 cb_state->AddChild(src_as_state);
3244 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003245 }
3246 }
3247 cb_state->hasBuildAccelerationStructureCmd = true;
3248}
locke-lunargd556cc32019-09-17 01:21:23 -06003249void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
Mike Schuchardt2df08912020-12-15 16:28:09 -08003250 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2 *pMemoryRequirements) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003251 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(pInfo->accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003252 if (as_state != nullptr) {
3253 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3254 as_state->memory_requirements = *pMemoryRequirements;
3255 as_state->memory_requirements_checked = true;
3256 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3257 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3258 as_state->build_scratch_memory_requirements_checked = true;
3259 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3260 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3261 as_state->update_scratch_memory_requirements_checked = true;
3262 }
3263 }
3264}
3265
sourav parmarcd5fb182020-07-17 12:58:44 -07003266void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3267 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06003268 if (VK_SUCCESS != result) return;
3269 for (uint32_t i = 0; i < bindInfoCount; i++) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003270 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003271
sourav parmarcd5fb182020-07-17 12:58:44 -07003272 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(info.accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003273 if (as_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06003274 // Track objects tied to memory
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003275 auto mem_state = GetDevMemShared(info.memory);
3276 if (mem_state) {
3277 as_state->SetMemBinding(mem_state, info.memoryOffset);
3278 }
locke-lunargd556cc32019-09-17 01:21:23 -06003279
3280 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003281 // XXX TODO: Query device address for KHR extension
sourav parmarcd5fb182020-07-17 12:58:44 -07003282 if (enabled[gpu_validation]) {
locke-lunargd556cc32019-09-17 01:21:23 -06003283 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3284 }
3285 }
3286 }
3287}
3288
3289void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3290 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3291 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3292 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3293 if (cb_state == nullptr) {
3294 return;
3295 }
3296
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003297 auto *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003298 if (dst_as_state != nullptr) {
3299 dst_as_state->built = true;
3300 dst_as_state->build_info.initialize(pInfo);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003301 if (!disabled[command_buffer_state]) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003302 cb_state->AddChild(dst_as_state);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003303 }
locke-lunargd556cc32019-09-17 01:21:23 -06003304 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003305 if (!disabled[command_buffer_state]) {
3306 auto *src_as_state = GetAccelerationStructureStateNV(src);
3307 if (src_as_state != nullptr) {
3308 cb_state->AddChild(src_as_state);
3309 }
locke-lunargd556cc32019-09-17 01:21:23 -06003310 }
3311 cb_state->hasBuildAccelerationStructureCmd = true;
3312}
3313
3314void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3315 VkAccelerationStructureNV dst,
3316 VkAccelerationStructureNV src,
3317 VkCopyAccelerationStructureModeNV mode) {
3318 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3319 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003320 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
3321 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003322 if (dst_as_state != nullptr && src_as_state != nullptr) {
3323 dst_as_state->built = true;
3324 dst_as_state->build_info = src_as_state->build_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003325 if (!disabled[command_buffer_state]) {
3326 cb_state->AddChild(dst_as_state);
3327 cb_state->AddChild(src_as_state);
3328 }
locke-lunargd556cc32019-09-17 01:21:23 -06003329 }
3330 }
3331}
3332
Jeff Bolz95176d02020-04-01 00:36:16 -05003333void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3334 VkAccelerationStructureKHR accelerationStructure,
3335 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003336 if (!accelerationStructure) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003337 auto *as_state = GetAccelerationStructureStateKHR(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003338 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003339 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003340 accelerationStructureMap_khr.erase(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003341 }
3342}
3343
Jeff Bolz95176d02020-04-01 00:36:16 -05003344void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3345 VkAccelerationStructureNV accelerationStructure,
3346 const VkAllocationCallbacks *pAllocator) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003347 if (!accelerationStructure) return;
3348 auto *as_state = GetAccelerationStructureStateNV(accelerationStructure);
3349 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003350 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003351 accelerationStructureMap.erase(accelerationStructure);
3352 }
Jeff Bolz95176d02020-04-01 00:36:16 -05003353}
3354
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003355void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3356 uint32_t viewportCount,
3357 const VkViewportWScalingNV *pViewportWScalings) {
3358 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3359 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003360 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003361}
3362
locke-lunargd556cc32019-09-17 01:21:23 -06003363void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3364 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3365 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003366 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003367}
3368
3369void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3370 uint16_t lineStipplePattern) {
3371 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3372 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003373 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003374}
3375
3376void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3377 float depthBiasClamp, float depthBiasSlopeFactor) {
3378 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3379 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003380 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003381}
3382
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003383void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3384 const VkRect2D *pScissors) {
3385 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003386 uint32_t bits = ((1u << scissorCount) - 1u) << firstScissor;
3387 cb_state->scissorMask |= bits;
3388 cb_state->trashedScissorMask &= ~bits;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003389 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003390 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003391}
3392
locke-lunargd556cc32019-09-17 01:21:23 -06003393void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3394 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3395 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003396 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003397}
3398
3399void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3400 float maxDepthBounds) {
3401 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3402 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003403 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003404}
3405
3406void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3407 uint32_t compareMask) {
3408 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3409 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003410 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003411}
3412
3413void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3414 uint32_t writeMask) {
3415 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3416 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003417 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003418}
3419
3420void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3421 uint32_t reference) {
3422 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3423 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003424 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003425}
3426
3427// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
3428// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
3429// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
3430void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
3431 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
3432 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
3433 cvdescriptorset::DescriptorSet *push_descriptor_set,
3434 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
3435 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
3436 // Defensive
3437 assert(pipeline_layout);
3438 if (!pipeline_layout) return;
3439
3440 uint32_t required_size = first_set + set_count;
3441 const uint32_t last_binding_index = required_size - 1;
3442 assert(last_binding_index < pipeline_layout->compat_for_set.size());
3443
3444 // Some useful shorthand
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003445 const auto lv_bind_point = ConvertToLvlBindPoint(pipeline_bind_point);
3446 auto &last_bound = cb_state->lastBound[lv_bind_point];
locke-lunargd556cc32019-09-17 01:21:23 -06003447 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
3448 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
3449
3450 // We need this three times in this function, but nowhere else
3451 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
3452 if (ds && ds->IsPushDescriptor()) {
3453 assert(ds == last_bound.push_descriptor_set.get());
3454 last_bound.push_descriptor_set = nullptr;
3455 return true;
3456 }
3457 return false;
3458 };
3459
3460    // Clean up the "disturbed" sets before and after the range to be set
3461 if (required_size < current_size) {
3462 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
3463            // We're disturbing those after last; we'll shrink below, but first need to check for and clean up the push_descriptor
3464 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
3465 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
3466 }
3467 } else {
3468 // We're not disturbing past last, so leave the upper binding data alone.
3469 required_size = current_size;
3470 }
3471 }
3472
3473 // We resize if we need more set entries or if those past "last" are disturbed
3474 if (required_size != current_size) {
3475 last_bound.per_set.resize(required_size);
3476 }
3477
3478 // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
3479 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
3480 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
3481 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3482 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
3483 last_bound.per_set[set_idx].dynamicOffsets.clear();
3484 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
3485 }
3486 }
3487
3488 // Now update the bound sets with the input sets
3489 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
3490 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
3491 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
3492 cvdescriptorset::DescriptorSet *descriptor_set =
3493 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
3494
3495 // Record binding (or push)
3496 if (descriptor_set != last_bound.push_descriptor_set.get()) {
3497 // Only cleanup the push descriptors if they aren't the currently used set.
3498 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3499 }
3500 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
3501 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
3502
3503 if (descriptor_set) {
3504 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
3505 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
3506 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
3507 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
3508 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
3509 input_dynamic_offsets = end_offset;
3510 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
3511 } else {
3512 last_bound.per_set[set_idx].dynamicOffsets.clear();
3513 }
3514 if (!descriptor_set->IsPushDescriptor()) {
3515 // Can't cache validation of push_descriptors
3516 cb_state->validated_descriptor_sets.insert(descriptor_set);
3517 }
3518 }
3519 }
3520}
3521
3522// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3523void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3524 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3525 uint32_t firstSet, uint32_t setCount,
3526 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3527 const uint32_t *pDynamicOffsets) {
3528 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3529 auto pipeline_layout = GetPipelineLayout(layout);
3530
3531 // Resize binding arrays
3532 uint32_t last_set_index = firstSet + setCount - 1;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003533 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3534 if (last_set_index >= cb_state->lastBound[lv_bind_point].per_set.size()) {
3535 cb_state->lastBound[lv_bind_point].per_set.resize(last_set_index + 1);
locke-lunargd556cc32019-09-17 01:21:23 -06003536 }
3537
3538 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3539 dynamicOffsetCount, pDynamicOffsets);
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003540 cb_state->lastBound[lv_bind_point].pipeline_layout = layout;
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06003541 cb_state->lastBound[lv_bind_point].UpdateSamplerDescriptorsUsedByImage();
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003542}
3543
locke-lunargd556cc32019-09-17 01:21:23 -06003544void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
3545 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
3546 const VkWriteDescriptorSet *pDescriptorWrites) {
3547 const auto &pipeline_layout = GetPipelineLayout(layout);
3548 // Short circuit invalid updates
3549 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003550 !pipeline_layout->set_layouts[set]->IsPushDescriptor()) {
locke-lunargd556cc32019-09-17 01:21:23 -06003551 return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003552 }
locke-lunargd556cc32019-09-17 01:21:23 -06003553
3554 // We need a descriptor set to update the bindings with, compatible with the passed layout
Jeremy Gebben50fb1832021-03-19 09:10:13 -06003555 const auto& dsl = pipeline_layout->set_layouts[set];
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003556 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3557 auto &last_bound = cb_state->lastBound[lv_bind_point];
locke-lunargd556cc32019-09-17 01:21:23 -06003558 auto &push_descriptor_set = last_bound.push_descriptor_set;
3559    // If we are disturbing the current push_descriptor_set, clear it
3560 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003561 last_bound.UnbindAndResetPushDescriptorSet(cb_state, new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
locke-lunargd556cc32019-09-17 01:21:23 -06003562 }
3563
3564 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
3565 nullptr);
3566 last_bound.pipeline_layout = layout;
3567
3568 // Now that we have either the new or extant push_descriptor set ... do the write updates against it
Jeff Bolz41a1ced2019-10-11 11:40:49 -05003569 push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
locke-lunargd556cc32019-09-17 01:21:23 -06003570}
3571
3572void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3573 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3574 uint32_t set, uint32_t descriptorWriteCount,
3575 const VkWriteDescriptorSet *pDescriptorWrites) {
3576 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3577 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3578}
3579
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003580void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3581 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3582 const void *pValues) {
3583 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3584 if (cb_state != nullptr) {
3585 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3586
3587 auto &push_constant_data = cb_state->push_constant_data;
3588 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3589 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06003590 cb_state->push_constant_pipeline_layout_set = layout;
3591
3592 auto flags = stageFlags;
3593 uint32_t bit_shift = 0;
3594 while (flags) {
3595 if (flags & 1) {
3596 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
3597 const auto it = cb_state->push_constant_data_update.find(flag);
3598
3599 if (it != cb_state->push_constant_data_update.end()) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06003600 std::memset(it->second.data() + offset, PC_Byte_Updated, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06003601 }
3602 }
3603 flags = flags >> 1;
3604 ++bit_shift;
3605 }
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003606 }
3607}
3608
locke-lunargd556cc32019-09-17 01:21:23 -06003609void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3610 VkIndexType indexType) {
locke-lunargd556cc32019-09-17 01:21:23 -06003611 auto cb_state = GetCBState(commandBuffer);
3612
3613 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
Piers Daniell39842ee2020-07-10 16:42:33 -06003614 cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
locke-lunarg1ae57d62020-11-18 10:49:19 -07003615 cb_state->index_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(buffer);
3616 cb_state->index_buffer_binding.size = cb_state->index_buffer_binding.buffer_state->createInfo.size;
locke-lunargd556cc32019-09-17 01:21:23 -06003617 cb_state->index_buffer_binding.offset = offset;
3618 cb_state->index_buffer_binding.index_type = indexType;
3619    // Add binding for this index buffer to this command buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003620 if (!disabled[command_buffer_state]) {
3621 cb_state->AddChild(cb_state->index_buffer_binding.buffer_state.get());
3622 }
locke-lunargd556cc32019-09-17 01:21:23 -06003623}
3624
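// PreCallRecordCmdBindVertexBuffers grows the tracked binding array to cover firstBinding + bindingCount
// slots, then records buffer, offset, a VK_WHOLE_SIZE size and a zero stride for each slot (this entry
// point carries no size/stride parameters), and links each non-null buffer to the command buffer unless
// command_buffer_state tracking is disabled.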
3625void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3626 uint32_t bindingCount, const VkBuffer *pBuffers,
3627 const VkDeviceSize *pOffsets) {
3628 auto cb_state = GetCBState(commandBuffer);
3629
3630 uint32_t end = firstBinding + bindingCount;
3631 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3632 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3633 }
3634
3635 for (uint32_t i = 0; i < bindingCount; ++i) {
3636 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07003637 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003638 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06003639 vertex_buffer_binding.size = VK_WHOLE_SIZE;
3640 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06003641        // Add binding for this vertex buffer to this command buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003642 if (pBuffers[i] && !disabled[command_buffer_state]) {
3643 cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
Jeff Bolz165818a2020-05-08 11:19:03 -05003644 }
locke-lunargd556cc32019-09-17 01:21:23 -06003645 }
3646}
3647
3648void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3649 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003650 if (disabled[command_buffer_state]) return;
3651
locke-lunargd556cc32019-09-17 01:21:23 -06003652 auto cb_state = GetCBState(commandBuffer);
3653 auto dst_buffer_state = GetBufferState(dstBuffer);
3654
3655 // Update bindings between buffer and cmd buffer
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003656 if (cb_state && dst_buffer_state) {
3657 cb_state->AddChild(dst_buffer_state);
3658 }
locke-lunargd556cc32019-09-17 01:21:23 -06003659}
3660
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06003661static bool SetEventStageMask(VkEvent event, VkPipelineStageFlags2KHR stageMask,
Jeff Bolz310775c2019-10-09 00:46:33 -05003662 EventToStageMap *localEventToStageMap) {
3663 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06003664 return false;
3665}
3666
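// RecordCmdSetEvent does not flip any event state immediately. It links the event to the command buffer
// (when state tracking is enabled), appends it to events (and to writeEventsBeforeWait if it has not
// already been waited on in this command buffer), and queues a lambda in eventUpdates that writes the
// stage mask into the caller-provided EventToStageMap when the deferred updates are evaluated.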
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003667void ValidationStateTracker::RecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06003668 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003669 if (!disabled[command_buffer_state]) {
3670 auto event_state = GetEventState(event);
3671 if (event_state) {
3672 cb_state->AddChild(event_state);
3673 }
locke-lunargd556cc32019-09-17 01:21:23 -06003674 }
3675 cb_state->events.push_back(event);
3676 if (!cb_state->waitedEvents.count(event)) {
3677 cb_state->writeEventsBeforeWait.push_back(event);
3678 }
Jeff Bolz310775c2019-10-09 00:46:33 -05003679 cb_state->eventUpdates.emplace_back(
3680 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
3681 return SetEventStageMask(event, stageMask, localEventToStageMap);
3682 });
locke-lunargd556cc32019-09-17 01:21:23 -06003683}
3684
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003685void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3686 VkPipelineStageFlags stageMask) {
3687 RecordCmdSetEvent(commandBuffer, event, stageMask);
3688}
3689
3690void ValidationStateTracker::PreCallRecordCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
3691 const VkDependencyInfoKHR *pDependencyInfo) {
3692 auto stage_masks = sync_utils::GetGlobalStageMasks(*pDependencyInfo);
3693
3694 RecordCmdSetEvent(commandBuffer, event, stage_masks.src);
3695}
3696
3697void ValidationStateTracker::RecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3698 VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06003699 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003700 if (!disabled[command_buffer_state]) {
3701 auto event_state = GetEventState(event);
3702 if (event_state) {
3703 cb_state->AddChild(event_state);
3704 }
locke-lunargd556cc32019-09-17 01:21:23 -06003705 }
3706 cb_state->events.push_back(event);
3707 if (!cb_state->waitedEvents.count(event)) {
3708 cb_state->writeEventsBeforeWait.push_back(event);
3709 }
3710
3711 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003712 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003713 return SetEventStageMask(event, VkPipelineStageFlags2KHR(0), localEventToStageMap);
Jeff Bolz310775c2019-10-09 00:46:33 -05003714 });
locke-lunargd556cc32019-09-17 01:21:23 -06003715}
3716
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003717void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3718 VkPipelineStageFlags stageMask) {
3719 RecordCmdResetEvent(commandBuffer, event, stageMask);
3720}
3721
3722void ValidationStateTracker::PreCallRecordCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
3723 VkPipelineStageFlags2KHR stageMask) {
3724 RecordCmdResetEvent(commandBuffer, event, stageMask);
3725}
3726
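// RecordCmdWaitEvents links each waited event to the command buffer (when state tracking is enabled) and
// records it in both waitedEvents and the ordered events list; the set/reset recording above only counts
// an event in writeEventsBeforeWait if it has not already been waited on here.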
3727void ValidationStateTracker::RecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents) {
locke-lunargd556cc32019-09-17 01:21:23 -06003728 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3729 for (uint32_t i = 0; i < eventCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003730 if (!disabled[command_buffer_state]) {
3731 auto event_state = GetEventState(pEvents[i]);
3732 if (event_state) {
3733 cb_state->AddChild(event_state);
3734 }
locke-lunargd556cc32019-09-17 01:21:23 -06003735 }
3736 cb_state->waitedEvents.insert(pEvents[i]);
3737 cb_state->events.push_back(pEvents[i]);
3738 }
3739}
3740
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003741void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
3742 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
3743 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3744 uint32_t bufferMemoryBarrierCount,
3745 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3746 uint32_t imageMemoryBarrierCount,
3747 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3748 RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
3749}
3750
3751void ValidationStateTracker::PreCallRecordCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount,
3752 const VkEvent *pEvents, const VkDependencyInfoKHR *pDependencyInfos) {
3753 RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
3754}
3755
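// The query helpers below operate on per-submission snapshots rather than on global state:
// SetQueryState and SetQueryStateMulti write the given QueryState into a local QueryMap keyed by
// (pool, slot, perf pass), and GetQueryState reads that map back, returning QUERYSTATE_UNKNOWN for
// entries it has never seen.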
Jeff Bolz310775c2019-10-09 00:46:33 -05003756bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
3757 (*localQueryToStateMap)[object] = value;
3758 return false;
3759}
3760
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003761bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
3762 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05003763 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003764 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05003765 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06003766 }
3767 return false;
3768}
3769
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003770QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
3771 uint32_t perfPass) const {
3772 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06003773
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003774 auto iter = localQueryToStateMap->find(query);
3775 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05003776
Jeff Bolz310775c2019-10-09 00:46:33 -05003777 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06003778}
3779
3780void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003781 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003782 cb_state->activeQueries.insert(query_obj);
3783 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003784 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3785 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3786 QueryMap *localQueryToStateMap) {
3787 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
3788 return false;
3789 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003790 if (!disabled[command_buffer_state]) {
3791 auto pool_state = GetQueryPoolState(query_obj.pool);
3792 cb_state->AddChild(pool_state);
3793 }
locke-lunargd556cc32019-09-17 01:21:23 -06003794}
3795
3796void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3797 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003798 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003799 QueryObject query = {queryPool, slot};
3800 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3801 RecordCmdBeginQuery(cb_state, query);
3802}
3803
3804void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003805 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003806 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003807 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3808 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3809 QueryMap *localQueryToStateMap) {
3810 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3811 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003812 if (!disabled[command_buffer_state]) {
3813 auto pool_state = GetQueryPoolState(query_obj.pool);
3814 cb_state->AddChild(pool_state);
3815 }
locke-lunargd556cc32019-09-17 01:21:23 -06003816}
3817
3818void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003819 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003820 QueryObject query_obj = {queryPool, slot};
3821 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3822 RecordCmdEndQuery(cb_state, query_obj);
3823}
3824
3825void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3826 uint32_t firstQuery, uint32_t queryCount) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003827 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003828 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3829
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02003830 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
3831 QueryObject query = {queryPool, slot};
3832 cb_state->resetQueries.insert(query);
3833 }
3834
Jeff Bolz310775c2019-10-09 00:46:33 -05003835 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003836 bool do_validate, VkQueryPool &firstPerfQueryPool,
3837 uint32_t perfQueryPass,
3838 QueryMap *localQueryToStateMap) {
3839 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06003840 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003841 if (!disabled[command_buffer_state]) {
3842 auto pool_state = GetQueryPoolState(queryPool);
3843 cb_state->AddChild(pool_state);
3844 }
locke-lunargd556cc32019-09-17 01:21:23 -06003845}
3846
3847void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3848 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3849 VkDeviceSize dstOffset, VkDeviceSize stride,
3850 VkQueryResultFlags flags) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003851 if (disabled[query_validation] || disabled[command_buffer_state]) return;
3852
locke-lunargd556cc32019-09-17 01:21:23 -06003853 auto cb_state = GetCBState(commandBuffer);
3854 auto dst_buff_state = GetBufferState(dstBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003855 cb_state->AddChild(dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003856 auto pool_state = GetQueryPoolState(queryPool);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003857 cb_state->AddChild(pool_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003858}
3859
3860void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3861 VkQueryPool queryPool, uint32_t slot) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003862 PostCallRecordCmdWriteTimestamp2KHR(commandBuffer, pipelineStage, queryPool, slot);
3863}
3864
3865void ValidationStateTracker::PostCallRecordCmdWriteTimestamp2KHR(VkCommandBuffer commandBuffer,
3866 VkPipelineStageFlags2KHR pipelineStage, VkQueryPool queryPool,
3867 uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003868 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003869 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003870 if (!disabled[command_buffer_state]) {
3871 auto pool_state = GetQueryPoolState(queryPool);
3872 cb_state->AddChild(pool_state);
3873 }
locke-lunargd556cc32019-09-17 01:21:23 -06003874 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003875 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
3876 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3877 QueryMap *localQueryToStateMap) {
3878 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3879 });
locke-lunargd556cc32019-09-17 01:21:23 -06003880}
3881
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003882void ValidationStateTracker::PostCallRecordCmdWriteAccelerationStructuresPropertiesKHR(
3883 VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR *pAccelerationStructures,
3884 VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {
3885 if (disabled[query_validation]) return;
3886 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003887 if (!disabled[command_buffer_state]) {
3888 auto pool_state = GetQueryPoolState(queryPool);
3889 cb_state->AddChild(pool_state);
3890 }
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003891 cb_state->queryUpdates.emplace_back(
3892 [queryPool, firstQuery, accelerationStructureCount](const ValidationStateTracker *device_data, bool do_validate,
3893 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3894 QueryMap *localQueryToStateMap) {
3895 return SetQueryStateMulti(queryPool, firstQuery, accelerationStructureCount, perfQueryPass, QUERYSTATE_ENDED,
3896 localQueryToStateMap);
3897 });
3898}
3899
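// PostCallRecordCreateFramebuffer snapshots the attachment image-view state only for conventional
// framebuffers; with VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT the views are unknown at creation time, the
// vector stays empty, and the actual attachments are supplied later at render pass begin.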
locke-lunargd556cc32019-09-17 01:21:23 -06003900void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3901 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3902 VkResult result) {
3903 if (VK_SUCCESS != result) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003904
Jeremy Gebben88f58142021-06-01 10:07:52 -06003905 std::vector<std::shared_ptr<IMAGE_VIEW_STATE>> views;
Mike Schuchardt2df08912020-12-15 16:28:09 -08003906 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) == 0) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003907 views.resize(pCreateInfo->attachmentCount);
locke-lunarg1ae57d62020-11-18 10:49:19 -07003908
locke-lunargd556cc32019-09-17 01:21:23 -06003909 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003910 views[i] = GetShared<IMAGE_VIEW_STATE>(pCreateInfo->pAttachments[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003911 }
3912 }
Jeremy Gebben88f58142021-06-01 10:07:52 -06003913
3914 frameBufferMap[*pFramebuffer] = std::make_shared<FRAMEBUFFER_STATE>(
3915 *pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass), std::move(views));
locke-lunargd556cc32019-09-17 01:21:23 -06003916}
3917
locke-lunargd556cc32019-09-17 01:21:23 -06003918void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
3919 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3920 VkResult result) {
3921 if (VK_SUCCESS != result) return;
Jeremy Gebben88f58142021-06-01 10:07:52 -06003922 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003923}
3924
Mike Schuchardt2df08912020-12-15 16:28:09 -08003925void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07003926 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3927 VkResult result) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003928 if (VK_SUCCESS != result) return;
3929
3930 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
Tony-LunarG977448c2019-12-02 14:52:02 -07003931}
3932
Mike Schuchardt2df08912020-12-15 16:28:09 -08003933void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07003934 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3935 VkResult result) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003936 if (VK_SUCCESS != result) return;
3937
3938 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
Tony-LunarG977448c2019-12-02 14:52:02 -07003939}
3940
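// RecordCmdBeginRenderPassState resolves the render pass and framebuffer into shared state objects and
// resets the per-pass bookkeeping: active subpass 0, the requested subpass contents, and a safe copy of
// the begin info. The active device mask comes from a chained VkDeviceGroupRenderPassBeginInfo when one
// is present, otherwise from the command buffer's initial device mask. When a framebuffer is bound, the
// active subpass and attachment arrays are rebuilt from its create info and the framebuffer is linked to
// the command buffer.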
locke-lunargd556cc32019-09-17 01:21:23 -06003941void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
3942 const VkRenderPassBeginInfo *pRenderPassBegin,
3943 const VkSubpassContents contents) {
3944 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunargaecf2152020-05-12 17:15:41 -06003945 auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
3946 auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06003947
3948 if (render_pass_state) {
locke-lunargaecf2152020-05-12 17:15:41 -06003949 cb_state->activeFramebuffer = framebuffer;
locke-lunargd556cc32019-09-17 01:21:23 -06003950 cb_state->activeRenderPass = render_pass_state;
Tony-LunarG61e7c0c2020-03-03 16:09:11 -07003951 cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
locke-lunargd556cc32019-09-17 01:21:23 -06003952 cb_state->activeSubpass = 0;
3953 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07003954
locke-lunargd556cc32019-09-17 01:21:23 -06003955 // Connect this RP to cmdBuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003956 if (!disabled[command_buffer_state]) {
3957 cb_state->AddChild(render_pass_state.get());
3958 }
locke-lunargd556cc32019-09-17 01:21:23 -06003959
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07003960 auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06003961 if (chained_device_group_struct) {
3962 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
3963 } else {
3964 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
3965 }
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003966
locke-lunargfc78e932020-11-19 17:06:24 -07003967 cb_state->active_subpasses = nullptr;
3968 cb_state->active_attachments = nullptr;
3969
3970 if (framebuffer) {
3971 cb_state->framebuffers.insert(framebuffer);
3972
3973 // Set cb_state->active_subpasses
3974 cb_state->active_subpasses =
3975 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
3976 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
3977 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
3978
3979 // Set cb_state->active_attachments & cb_state->attachments_view_states
3980 cb_state->active_attachments =
3981 std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(framebuffer->createInfo.attachmentCount);
3982 UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, pRenderPassBegin);
3983
3984 // Connect this framebuffer and its children to this cmdBuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003985 cb_state->AddChild(framebuffer.get());
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003986 }
locke-lunargd556cc32019-09-17 01:21:23 -06003987 }
3988}
3989
3990void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
3991 const VkRenderPassBeginInfo *pRenderPassBegin,
3992 VkSubpassContents contents) {
3993 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
3994}
3995
3996void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
3997 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003998 const VkSubpassBeginInfo *pSubpassBeginInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06003999 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4000}
4001
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06004002void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4003 uint32_t counterBufferCount,
4004 const VkBuffer *pCounterBuffers,
4005 const VkDeviceSize *pCounterBufferOffsets) {
4006 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4007
4008 cb_state->transform_feedback_active = true;
4009}
4010
4011void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4012 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
4013 const VkDeviceSize *pCounterBufferOffsets) {
4014 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4015
4016 cb_state->transform_feedback_active = false;
4017}
4018
Tony-LunarG977448c2019-12-02 14:52:02 -07004019void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
4020 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004021 const VkSubpassBeginInfo *pSubpassBeginInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004022 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4023}
4024
locke-lunargd556cc32019-09-17 01:21:23 -06004025void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4026 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4027 cb_state->activeSubpass++;
4028 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07004029
4030 // Update cb_state->active_subpasses
4031 if (cb_state->activeRenderPass && cb_state->activeFramebuffer) {
4032 cb_state->active_subpasses = nullptr;
4033 cb_state->active_subpasses =
4034 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
4035
4036 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
4037 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
4038 }
locke-lunargd556cc32019-09-17 01:21:23 -06004039}
4040
4041void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4042 RecordCmdNextSubpass(commandBuffer, contents);
4043}
4044
4045void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004046 const VkSubpassBeginInfo *pSubpassBeginInfo,
4047 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06004048 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4049}
4050
Tony-LunarG977448c2019-12-02 14:52:02 -07004051void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004052 const VkSubpassBeginInfo *pSubpassBeginInfo,
4053 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004054 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4055}
4056
locke-lunargd556cc32019-09-17 01:21:23 -06004057void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
4058 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4059 cb_state->activeRenderPass = nullptr;
locke-lunargfc78e932020-11-19 17:06:24 -07004060 cb_state->active_attachments = nullptr;
4061 cb_state->active_subpasses = nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004062 cb_state->activeSubpass = 0;
4063 cb_state->activeFramebuffer = VK_NULL_HANDLE;
4064}
4065
4066void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
4067 RecordCmdEndRenderPassState(commandBuffer);
4068}
4069
4070void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004071 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06004072 RecordCmdEndRenderPassState(commandBuffer);
4073}
4074
Tony-LunarG977448c2019-12-02 14:52:02 -07004075void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004076 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004077 RecordCmdEndRenderPassState(commandBuffer);
4078}
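
// PreCallRecordCmdExecuteCommands folds each secondary command buffer into the primary: the
// SIMULTANEOUS_USE flag is cleared on the primary when a secondary does not allow it, the secondaries'
// image layout maps are merged into the primary's, the secondaries are linked as children, and their
// deferred query and queue-submit callbacks are appended. Viewport/scissor state is conservatively
// marked as trashed afterwards, since the executed secondaries may have overwritten it.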
locke-lunargd556cc32019-09-17 01:21:23 -06004079void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
4080 const VkCommandBuffer *pCommandBuffers) {
4081 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4082
4083 CMD_BUFFER_STATE *sub_cb_state = NULL;
4084 for (uint32_t i = 0; i < commandBuffersCount; i++) {
4085 sub_cb_state = GetCBState(pCommandBuffers[i]);
4086 assert(sub_cb_state);
4087 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
4088 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
4089 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
4090 // from the validation step to the recording step
4091 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
4092 }
4093 }
4094
4095        // Propagate initial layout and current layout state to the primary cmd buffer
4096 // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
4097        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
4098 // for those other classes.
4099 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
4100 const auto image = sub_layout_map_entry.first;
4101 const auto *image_state = GetImageState(image);
4102 if (!image_state) continue; // Can't set layouts of a dead image
4103
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06004104 auto *cb_subres_map = cb_state->GetImageSubresourceLayoutMap(*image_state);
John Zulauf17708d02021-02-22 11:20:58 -07004105 const auto *sub_cb_subres_map = &sub_layout_map_entry.second;
locke-lunargd556cc32019-09-17 01:21:23 -06004106 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
4107 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
4108 }
4109
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06004110 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer();
locke-lunargd556cc32019-09-17 01:21:23 -06004111 cb_state->linkedCommandBuffers.insert(sub_cb_state);
Jeremy Gebben5570abe2021-05-16 18:35:13 -06004112 cb_state->AddChild(sub_cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004113 for (auto &function : sub_cb_state->queryUpdates) {
4114 cb_state->queryUpdates.push_back(function);
4115 }
4116 for (auto &function : sub_cb_state->queue_submit_functions) {
4117 cb_state->queue_submit_functions.push_back(function);
4118 }
David Zhao Akeley44139b12021-04-26 16:16:13 -07004119
4120 // State is trashed after executing secondary command buffers.
4121 // Importantly, this function runs after CoreChecks::PreCallValidateCmdExecuteCommands.
4122 cb_state->trashedViewportMask = ~uint32_t(0);
4123 cb_state->trashedScissorMask = ~uint32_t(0);
4124 cb_state->trashedViewportCount = true;
4125 cb_state->trashedScissorCount = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004126 }
4127}
4128
4129void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
4130 VkFlags flags, void **ppData, VkResult result) {
4131 if (VK_SUCCESS != result) return;
4132 RecordMappedMemory(mem, offset, size, ppData);
4133}
4134
4135void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4136 auto mem_info = GetDevMemState(mem);
4137 if (mem_info) {
4138 mem_info->mapped_range = MemRange();
4139 mem_info->p_driver_data = nullptr;
4140 }
4141}
4142
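// UpdateBindImageMemoryState covers both bind flavors. For swapchain-backed binds (a
// VkBindImageMemorySwapchainInfoKHR in the pNext chain) the image receives a fake address for its
// swapchain slot, is added to that slot's bound_images, and is aliased with every other image bound to
// the same slot. For regular binds the image is recorded against the VkDeviceMemory allocation, aliasing
// is set up for VK_IMAGE_CREATE_ALIAS_BIT images, and the memory binding offset is stored on the image
// state.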
4143void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
4144 IMAGE_STATE *image_state = GetImageState(bindInfo.image);
4145 if (image_state) {
locke-lunargae26eac2020-04-16 15:29:05 -06004146        // An Android special image cannot get VkSubresourceLayout until the image is bound to memory.
4147 // See: VUID-vkGetImageSubresourceLayout-image-01895
4148 image_state->fragment_encoder =
4149 std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07004150 const auto swapchain_info = LvlFindInChain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06004151 if (swapchain_info) {
John Zulauf29d00532021-03-04 13:28:54 -07004152 auto *swapchain = GetSwapchainState(swapchain_info->swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004153 if (swapchain) {
John Zulaufd13b38e2021-03-05 08:17:38 -07004154 SWAPCHAIN_IMAGE &swap_image = swapchain->images[swapchain_info->imageIndex];
John Zulauf29d00532021-03-04 13:28:54 -07004155 if (swap_image.bound_images.empty()) {
4156 // If this is the first "binding" of an image to this swapchain index, get a fake allocation
4157 image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
4158 } else {
4159 image_state->swapchain_fake_address = (*swap_image.bound_images.cbegin())->swapchain_fake_address;
4160 }
John Zulaufd13b38e2021-03-05 08:17:38 -07004161 swap_image.bound_images.emplace(image_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004162 image_state->bind_swapchain = swapchain_info->swapchain;
4163 image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
John Zulaufd13b38e2021-03-05 08:17:38 -07004164
John Zulauf29d00532021-03-04 13:28:54 -07004165 // All images bound to this swapchain and index are aliases
Jeremy Gebben5570abe2021-05-16 18:35:13 -06004166 image_state->AddAliasingImage(swap_image.bound_images);
locke-lunargd556cc32019-09-17 01:21:23 -06004167 }
4168 } else {
4169 // Track bound memory range information
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004170 auto mem_info = GetDevMemShared(bindInfo.memory);
locke-lunargd556cc32019-09-17 01:21:23 -06004171 if (mem_info) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06004172 mem_info->bound_images.insert(image_state);
John Zulaufd13b38e2021-03-05 08:17:38 -07004173 if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06004174 image_state->AddAliasingImage(mem_info->bound_images);
John Zulaufd13b38e2021-03-05 08:17:38 -07004175 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004176 // Track objects tied to memory
4177 image_state->SetMemBinding(mem_info, bindInfo.memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06004178 }
locke-lunargd556cc32019-09-17 01:21:23 -06004179 }
locke-lunargd556cc32019-09-17 01:21:23 -06004180 }
4181}
4182
4183void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
4184 VkDeviceSize memoryOffset, VkResult result) {
4185 if (VK_SUCCESS != result) return;
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004186 auto bind_info = LvlInitStruct<VkBindImageMemoryInfo>();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004187 bind_info.image = image;
4188 bind_info.memory = mem;
4189 bind_info.memoryOffset = memoryOffset;
4190 UpdateBindImageMemoryState(bind_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004191}
4192
4193void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004194 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004195 if (VK_SUCCESS != result) return;
4196 for (uint32_t i = 0; i < bindInfoCount; i++) {
4197 UpdateBindImageMemoryState(pBindInfos[i]);
4198 }
4199}
4200
4201void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004202 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004203 if (VK_SUCCESS != result) return;
4204 for (uint32_t i = 0; i < bindInfoCount; i++) {
4205 UpdateBindImageMemoryState(pBindInfos[i]);
4206 }
4207}
4208
4209void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
4210 auto event_state = GetEventState(event);
4211 if (event_state) {
4212 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
4213 }
locke-lunargd556cc32019-09-17 01:21:23 -06004214}
4215
4216void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
4217 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
4218 VkResult result) {
4219 if (VK_SUCCESS != result) return;
4220 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
4221 pImportSemaphoreFdInfo->flags);
4222}
4223
4224void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004225 VkExternalSemaphoreHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004226 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004227 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004228 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
4229 semaphore_state->scope = kSyncScopeExternalPermanent;
4230 }
4231}
4232
4233#ifdef VK_USE_PLATFORM_WIN32_KHR
4234void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
4235 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
4236 if (VK_SUCCESS != result) return;
4237 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
4238 pImportSemaphoreWin32HandleInfo->flags);
4239}
4240
4241void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
4242 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4243 HANDLE *pHandle, VkResult result) {
4244 if (VK_SUCCESS != result) return;
4245 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
4246}
4247
4248void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
4249 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
4250 if (VK_SUCCESS != result) return;
4251 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
4252 pImportFenceWin32HandleInfo->flags);
4253}
4254
4255void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
4256 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4257 HANDLE *pHandle, VkResult result) {
4258 if (VK_SUCCESS != result) return;
4259 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
4260}
4261#endif
4262
4263void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
4264 VkResult result) {
4265 if (VK_SUCCESS != result) return;
4266 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
4267}
4268
Mike Schuchardt2df08912020-12-15 16:28:09 -08004269void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type,
4270 VkFenceImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06004271 FENCE_STATE *fence_node = GetFenceState(fence);
4272 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004273 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_FENCE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06004274 fence_node->scope == kSyncScopeInternal) {
4275 fence_node->scope = kSyncScopeExternalTemporary;
4276 } else {
4277 fence_node->scope = kSyncScopeExternalPermanent;
4278 }
4279 }
4280}
4281
4282void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
4283 VkResult result) {
4284 if (VK_SUCCESS != result) return;
4285 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
4286}
4287
Mike Schuchardt2df08912020-12-15 16:28:09 -08004288void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004289 FENCE_STATE *fence_state = GetFenceState(fence);
4290 if (fence_state) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004291 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004292 // Export with reference transference becomes external
4293 fence_state->scope = kSyncScopeExternalPermanent;
4294 } else if (fence_state->scope == kSyncScopeInternal) {
4295 // Export with copy transference has a side effect of resetting the fence
4296 fence_state->state = FENCE_UNSIGNALED;
4297 }
4298 }
4299}
4300
4301void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
4302 VkResult result) {
4303 if (VK_SUCCESS != result) return;
4304 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
4305}
4306
4307void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
4308 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
4309 if (VK_SUCCESS != result) return;
John Zulaufd5115702021-01-18 12:34:33 -07004310 const auto event = *pEvent;
Jeremy Gebbencbf22862021-03-03 12:01:22 -07004311 eventMap.emplace(event, std::make_shared<EVENT_STATE>(event, pCreateInfo->flags));
locke-lunargd556cc32019-09-17 01:21:23 -06004312}
4313
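// RecordCreateSwapchainState registers new swapchain state on success (flagging shared-presentable
// present modes) and points the surface at it; on failure the surface's swapchain pointer is cleared.
// Either way an oldSwapchain passed by the application is marked retired, matching the spec requirement
// that oldSwapchain is retired even when creation fails.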
4314void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
4315 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
4316 SWAPCHAIN_NODE *old_swapchain_state) {
4317 if (VK_SUCCESS == result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004318 auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004319 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
4320 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
4321 swapchain_state->shared_presentable = true;
4322 }
4323 surface_state->swapchain = swapchain_state.get();
4324 swapchainMap[*pSwapchain] = std::move(swapchain_state);
4325 } else {
4326 surface_state->swapchain = nullptr;
4327 }
4328 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
4329 if (old_swapchain_state) {
4330 old_swapchain_state->retired = true;
4331 }
4332 return;
4333}
4334
4335void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
4336 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
4337 VkResult result) {
4338 auto surface_state = GetSurfaceState(pCreateInfo->surface);
4339 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
4340 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
4341}
4342
4343void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
4344 const VkAllocationCallbacks *pAllocator) {
4345 if (!swapchain) return;
4346 auto swapchain_data = GetSwapchainState(swapchain);
4347 if (swapchain_data) {
John Zulauffaa7a522021-03-05 12:22:45 -07004348 for (auto &swapchain_image : swapchain_data->images) {
4349 // TODO: missing validation that the bound images are empty (except for image_state above)
4350 // Clean up the aliases and the bound_images *before* erasing the image_state.
Jeremy Gebben5570abe2021-05-16 18:35:13 -06004351 RemoveAliasingImages(swapchain_image.bound_images);
John Zulauffaa7a522021-03-05 12:22:45 -07004352 swapchain_image.bound_images.clear();
4353
4354 if (swapchain_image.image_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004355 swapchain_image.image_state->Destroy();
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06004356 imageMap.erase(swapchain_image.image_state->image());
John Zulauffaa7a522021-03-05 12:22:45 -07004357 swapchain_image.image_state = nullptr;
John Zulauf2d60a452021-03-04 15:12:03 -07004358 }
locke-lunargd556cc32019-09-17 01:21:23 -06004359 }
4360
4361 auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
4362 if (surface_state) {
4363 if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
4364 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004365 swapchain_data->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06004366 swapchainMap.erase(swapchain);
4367 }
4368}
4369
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004370void ValidationStateTracker::PostCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
4371 const VkDisplayModeCreateInfoKHR *pCreateInfo,
4372 const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode,
4373 VkResult result) {
4374 if (VK_SUCCESS != result) return;
4375 if (!pMode) return;
Jeremy Gebben5573a8c2021-06-04 08:55:10 -06004376 display_mode_map[*pMode] = std::make_shared<DISPLAY_MODE_STATE>(*pMode, physicalDevice);
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004377}
4378
locke-lunargd556cc32019-09-17 01:21:23 -06004379void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
4380 // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
4381 for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004382 auto semaphore_state = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
4383 if (semaphore_state) {
4384 semaphore_state->signaler.first = VK_NULL_HANDLE;
4385 semaphore_state->signaled = false;
locke-lunargd556cc32019-09-17 01:21:23 -06004386 }
4387 }
4388
4389 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
4390 // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
4391 // confused itself just as much.
4392 auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
4393 if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
4394 // Mark the image as having been released to the WSI
4395 auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
4396 if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
John Zulauffaa7a522021-03-05 12:22:45 -07004397 IMAGE_STATE *image_state = swapchain_data->images[pPresentInfo->pImageIndices[i]].image_state;
locke-lunargd556cc32019-09-17 01:21:23 -06004398 if (image_state) {
4399 image_state->acquired = false;
Jeff Bolz46c0ea02019-10-09 13:06:29 -05004400 if (image_state->shared_presentable) {
4401 image_state->layout_locked = true;
4402 }
locke-lunargd556cc32019-09-17 01:21:23 -06004403 }
4404 }
4405 }
4406 // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
4407 // its semaphore waits) /never/ participate in any completion proof.
4408}
4409
4410void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
4411 const VkSwapchainCreateInfoKHR *pCreateInfos,
4412 const VkAllocationCallbacks *pAllocator,
4413 VkSwapchainKHR *pSwapchains, VkResult result) {
4414 if (pCreateInfos) {
4415 for (uint32_t i = 0; i < swapchainCount; i++) {
4416 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
4417 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
4418 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
4419 }
4420 }
4421}
4422
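// RecordAcquireNextImageState mirrors the semantics of vkAcquireNextImage*: an internal-scope fence is
// treated as in-flight and an internal-scope semaphore as signaled, both with a VK_NULL_HANDLE signaler
// because the acquire does not happen on a queue, and the acquired swapchain image is marked as acquired
// by the application (inheriting the swapchain's shared_presentable flag).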
4423void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4424 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004425 auto fence_state = GetFenceState(fence);
4426 if (fence_state && fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004427 // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
4428 // import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004429 fence_state->state = FENCE_INFLIGHT;
4430 fence_state->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
locke-lunargd556cc32019-09-17 01:21:23 -06004431 }
4432
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004433 auto semaphore_state = GetSemaphoreState(semaphore);
4434 if (semaphore_state && semaphore_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004435 // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
4436 // temporary import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004437 semaphore_state->signaled = true;
4438 semaphore_state->signaler.first = VK_NULL_HANDLE;
locke-lunargd556cc32019-09-17 01:21:23 -06004439 }
4440
4441 // Mark the image as acquired.
4442 auto swapchain_data = GetSwapchainState(swapchain);
4443 if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
John Zulauffaa7a522021-03-05 12:22:45 -07004444 IMAGE_STATE *image_state = swapchain_data->images[*pImageIndex].image_state;
locke-lunargd556cc32019-09-17 01:21:23 -06004445 if (image_state) {
4446 image_state->acquired = true;
4447 image_state->shared_presentable = swapchain_data->shared_presentable;
4448 }
4449 }
4450}
4451
4452void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4453 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
4454 VkResult result) {
4455 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4456 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
4457}
4458
4459void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
4460 uint32_t *pImageIndex, VkResult result) {
4461 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4462 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
4463 pAcquireInfo->fence, pImageIndex);
4464}
4465
4466void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
4467 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
4468 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
4469 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
4470 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
4471 phys_device_state.phys_device = pPhysicalDevices[i];
4472 // Init actual features for each physical device
4473 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
4474 }
4475 }
4476}
4477
4478// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
4479static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004480 VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004481 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
4482
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004483 if (pQueueFamilyProperties) { // Save queue family properties
locke-lunargd556cc32019-09-17 01:21:23 -06004484 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
4485 for (uint32_t i = 0; i < count; ++i) {
4486 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
4487 }
4488 }
4489}
4490
4491void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
4492 uint32_t *pQueueFamilyPropertyCount,
4493 VkQueueFamilyProperties *pQueueFamilyProperties) {
4494 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4495 assert(physical_device_state);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004496 VkQueueFamilyProperties2 *pqfp = nullptr;
4497 std::vector<VkQueueFamilyProperties2> qfp;
locke-lunargd556cc32019-09-17 01:21:23 -06004498 qfp.resize(*pQueueFamilyPropertyCount);
4499 if (pQueueFamilyProperties) {
4500 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004501 qfp[i] = LvlInitStruct<VkQueueFamilyProperties2>();
locke-lunargd556cc32019-09-17 01:21:23 -06004502 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
4503 }
4504 pqfp = qfp.data();
4505 }
4506 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
4507}
4508
4509void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004510 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004511 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4512 assert(physical_device_state);
4513 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4514 pQueueFamilyProperties);
4515}
4516
4517void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004518 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004519 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4520 assert(physical_device_state);
4521 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4522 pQueueFamilyProperties);
4523}
4524void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
4525 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004526 if (!surface) return;
4527 auto surface_state = GetSurfaceState(surface);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004528 surface_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06004529 surface_map.erase(surface);
4530}
4531
4532void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004533 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06004534}
4535
4536void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
4537 const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
4538 const VkAllocationCallbacks *pAllocator,
4539 VkSurfaceKHR *pSurface, VkResult result) {
4540 if (VK_SUCCESS != result) return;
4541 RecordVulkanSurface(pSurface);
4542}
4543
4544#ifdef VK_USE_PLATFORM_ANDROID_KHR
4545void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
4546 const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
4547 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4548 VkResult result) {
4549 if (VK_SUCCESS != result) return;
4550 RecordVulkanSurface(pSurface);
4551}
4552#endif // VK_USE_PLATFORM_ANDROID_KHR
4553
4554#ifdef VK_USE_PLATFORM_IOS_MVK
4555void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
4556 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4557 VkResult result) {
4558 if (VK_SUCCESS != result) return;
4559 RecordVulkanSurface(pSurface);
4560}
4561#endif // VK_USE_PLATFORM_IOS_MVK
4562
4563#ifdef VK_USE_PLATFORM_MACOS_MVK
4564void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
4565 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
4566 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4567 VkResult result) {
4568 if (VK_SUCCESS != result) return;
4569 RecordVulkanSurface(pSurface);
4570}
4571#endif // VK_USE_PLATFORM_MACOS_MVK
4572
Jeremy Kniagerf33a67c2019-12-09 09:44:39 -07004573#ifdef VK_USE_PLATFORM_METAL_EXT
4574void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
4575 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
4576 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4577 VkResult result) {
4578 if (VK_SUCCESS != result) return;
4579 RecordVulkanSurface(pSurface);
4580}
4581#endif // VK_USE_PLATFORM_METAL_EXT
4582
locke-lunargd556cc32019-09-17 01:21:23 -06004583#ifdef VK_USE_PLATFORM_WAYLAND_KHR
4584void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
4585 const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
4586 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4587 VkResult result) {
4588 if (VK_SUCCESS != result) return;
4589 RecordVulkanSurface(pSurface);
4590}
4591#endif // VK_USE_PLATFORM_WAYLAND_KHR
4592
4593#ifdef VK_USE_PLATFORM_WIN32_KHR
4594void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
4595 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
4596 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4597 VkResult result) {
4598 if (VK_SUCCESS != result) return;
4599 RecordVulkanSurface(pSurface);
4600}
4601#endif // VK_USE_PLATFORM_WIN32_KHR
4602
4603#ifdef VK_USE_PLATFORM_XCB_KHR
4604void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
4605 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4606 VkResult result) {
4607 if (VK_SUCCESS != result) return;
4608 RecordVulkanSurface(pSurface);
4609}
4610#endif // VK_USE_PLATFORM_XCB_KHR
4611
4612#ifdef VK_USE_PLATFORM_XLIB_KHR
4613void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
4614 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4615 VkResult result) {
4616 if (VK_SUCCESS != result) return;
4617 RecordVulkanSurface(pSurface);
4618}
4619#endif // VK_USE_PLATFORM_XLIB_KHR
4620
Niklas Haas8b84af12020-04-19 22:20:11 +02004621void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
4622 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
4623 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4624 VkResult result) {
4625 if (VK_SUCCESS != result) return;
4626 RecordVulkanSurface(pSurface);
4627}
4628
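// Cache the queried features on the physical device state. The 1.0 entry point stores only the core feature
// struct (after resetting features2), while the "2"/"2KHR" entry points capture the whole pNext chain.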
Cort23cf2282019-09-20 18:58:18 +02004629void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004630 VkPhysicalDeviceFeatures *pFeatures) {
4631 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Yilong Li358152a2020-07-08 02:16:45 -07004632 // Reset the features2 safe struct before setting up the features field.
4633 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02004634 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02004635}
4636
4637void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004638 VkPhysicalDeviceFeatures2 *pFeatures) {
4639 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02004640 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02004641}
4642
4643void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004644 VkPhysicalDeviceFeatures2 *pFeatures) {
4645 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02004646 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02004647}
4648
locke-lunargd556cc32019-09-17 01:21:23 -06004649void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
4650 VkSurfaceKHR surface,
4651 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
4652 VkResult result) {
4653 if (VK_SUCCESS != result) return;
4654 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004655 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004656
4657 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4658 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004659}
4660
4661void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
4662 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4663 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
4664 if (VK_SUCCESS != result) return;
4665 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004666 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004667
4668 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4669 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004670}
4671
4672void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
4673 VkSurfaceKHR surface,
4674 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
4675 VkResult result) {
4676 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004677 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
4678 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
4679 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
4680 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
4681 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
4682 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
4683 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
4684 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
4685 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
4686 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004687
4688 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4689 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004690}
4691
4692void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
4693 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
4694 VkBool32 *pSupported, VkResult result) {
4695 if (VK_SUCCESS != result) return;
4696 auto surface_state = GetSurfaceState(surface);
4697 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
4698}
4699
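// Note: the cached present-mode vector only ever grows. A count-only query reserves space, and a query that
// returns data fills the entries in place.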
4700void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
4701 VkSurfaceKHR surface,
4702 uint32_t *pPresentModeCount,
4703 VkPresentModeKHR *pPresentModes,
4704 VkResult result) {
4705 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4706
4707 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
4708 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004709
4710 if (*pPresentModeCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004711 if (*pPresentModeCount > physical_device_state->present_modes.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004712 physical_device_state->present_modes.resize(*pPresentModeCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004713 }
locke-lunargd556cc32019-09-17 01:21:23 -06004714 }
4715 if (pPresentModes) {
locke-lunargd556cc32019-09-17 01:21:23 -06004716 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
4717 physical_device_state->present_modes[i] = pPresentModes[i];
4718 }
4719 }
4720}
4721
4722void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
4723 uint32_t *pSurfaceFormatCount,
4724 VkSurfaceFormatKHR *pSurfaceFormats,
4725 VkResult result) {
4726 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4727
4728 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004729
4730 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004731 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004732 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004733 }
locke-lunargd556cc32019-09-17 01:21:23 -06004734 }
4735 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004736 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
4737 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
4738 }
4739 }
4740}
4741
4742void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
4743 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4744 uint32_t *pSurfaceFormatCount,
4745 VkSurfaceFormat2KHR *pSurfaceFormats,
4746 VkResult result) {
4747 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4748
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004749 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004750 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004751 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
4752 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
4753 }
locke-lunargd556cc32019-09-17 01:21:23 -06004754 }
4755 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004756 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004757 physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
locke-lunargd556cc32019-09-17 01:21:23 -06004758 }
4759 }
4760}
4761
4762void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4763 const VkDebugUtilsLabelEXT *pLabelInfo) {
4764 BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4765}
4766
4767void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
4768 EndCmdDebugUtilsLabel(report_data, commandBuffer);
4769}
4770
4771void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4772 const VkDebugUtilsLabelEXT *pLabelInfo) {
4773 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4774
4775 // Squirrel away an easily accessible copy.
4776 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4777 cb_state->debug_label = LoggingLabel(pLabelInfo);
4778}
4779
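// Enumerating device groups also records per-physical-device state for each group member and snapshots its core
// features via DispatchGetPhysicalDeviceFeatures.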
4780void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004781 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004782 if (NULL != pPhysicalDeviceGroupProperties) {
4783 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
4784 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
4785 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
4786 auto &phys_device_state = physical_device_map[cur_phys_dev];
4787 phys_device_state.phys_device = cur_phys_dev;
4788 // Init actual features for each physical device
4789 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
4790 }
4791 }
4792 }
4793}
4794
4795void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004796 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004797 VkResult result) {
4798 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4799 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4800}
4801
4802void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004803 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004804 VkResult result) {
4805 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4806 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4807}
4808
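// Counters reported for a queue family are cached on the physical device state so later VK_KHR_performance_query
// validation can reference them; nothing is recorded for the count-only query (pCounters == NULL).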
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004809void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
4810 uint32_t queueFamilyIndex,
4811 uint32_t *pCounterCount,
4812 VkPerformanceCounterKHR *pCounters) {
4813 if (NULL == pCounters) return;
4814
4815 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4816 assert(physical_device_state);
4817
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004818 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queue_family_counters(new QUEUE_FAMILY_PERF_COUNTERS());
4819 queue_family_counters->counters.resize(*pCounterCount);
4820 for (uint32_t i = 0; i < *pCounterCount; i++) queue_family_counters->counters[i] = pCounters[i];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004821
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004822 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queue_family_counters);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004823}
4824
4825void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
4826 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
4827 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
4828 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4829 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
4830}
4831
4832void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
4833 VkResult result) {
4834 if (result == VK_SUCCESS) performance_lock_acquired = true;
4835}
4836
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004837void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
4838 performance_lock_acquired = false;
4839 for (auto &cmd_buffer : commandBufferMap) {
4840 cmd_buffer.second->performance_lock_released = true;
4841 }
4842}
4843
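// Descriptor update template destruction marks the state object destroyed before erasing it from the map, so any
// code still holding a shared reference can observe that the handle is gone.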
locke-lunargd556cc32019-09-17 01:21:23 -06004844void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004845 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004846 const VkAllocationCallbacks *pAllocator) {
4847 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004848 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4849 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004850 desc_template_map.erase(descriptorUpdateTemplate);
4851}
4852
4853void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004854 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004855 const VkAllocationCallbacks *pAllocator) {
4856 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004857 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4858 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004859 desc_template_map.erase(descriptorUpdateTemplate);
4860}
4861
Mike Schuchardt2df08912020-12-15 16:28:09 -08004862void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4863 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) {
locke-lunargd556cc32019-09-17 01:21:23 -06004864 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004865 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004866 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
4867}
4868
Mike Schuchardt2df08912020-12-15 16:28:09 -08004869void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device,
4870 const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4871 const VkAllocationCallbacks *pAllocator,
4872 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate,
4873 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004874 if (VK_SUCCESS != result) return;
4875 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4876}
4877
4878void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004879 VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
4880 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004881 if (VK_SUCCESS != result) return;
4882 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4883}
4884
4885void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004886 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004887 const void *pData) {
4888 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
4889 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
4890 assert(0);
4891 } else {
4892 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
4893 // TODO: Record template push descriptor updates
4894 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
4895 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
4896 }
4897 }
4898}
4899
4900void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
4901 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4902 const void *pData) {
4903 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
4904}
4905
4906void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004907 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004908 const void *pData) {
4909 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
4910}
4911
Mike Schuchardt2df08912020-12-15 16:28:09 -08004912void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
4913 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4914 VkPipelineLayout layout, uint32_t set,
4915 const void *pData) {
locke-lunargd556cc32019-09-17 01:21:23 -06004916 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4917
4918 const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4919 if (template_state) {
4920 auto layout_data = GetPipelineLayout(layout);
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06004921 auto dsl = layout_data ? layout_data->GetDsl(set) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004922 const auto &template_ci = template_state->create_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004923 if (dsl && !dsl->Destroyed()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004924 // Decode the template into a set of write updates
4925 cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
4926 dsl->GetDescriptorSetLayout());
4927 RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
4928 static_cast<uint32_t>(decoded_template.desc_writes.size()),
4929 decoded_template.desc_writes.data());
4930 }
4931 }
4932}
4933
4934void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
4935 uint32_t *pPropertyCount, void *pProperties) {
4936 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4937 if (*pPropertyCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06004938 physical_device_state->display_plane_property_count = *pPropertyCount;
4939 }
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004940 if (*pPropertyCount || pProperties) {
4941 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004942 }
4943}
4944
4945void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
4946 uint32_t *pPropertyCount,
4947 VkDisplayPlanePropertiesKHR *pProperties,
4948 VkResult result) {
4949 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4950 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
4951}
4952
4953void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
4954 uint32_t *pPropertyCount,
4955 VkDisplayPlaneProperties2KHR *pProperties,
4956 VkResult result) {
4957 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4958 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
4959}
4960
4961void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4962 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
4963 QueryObject query_obj = {queryPool, query, index};
4964 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4965 RecordCmdBeginQuery(cb_state, query_obj);
4966}
4967
4968void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4969 uint32_t query, uint32_t index) {
4970 QueryObject query_obj = {queryPool, query, index};
4971 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4972 RecordCmdEndQuery(cb_state, query_obj);
4973}
4974
4975void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
4976 VkSamplerYcbcrConversion ycbcr_conversion) {
Jeremy Gebben5d970742021-05-31 16:04:14 -06004977 auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>(ycbcr_conversion, create_info,
4978 GetPotentialFormatFeatures(create_info->format));
4979 // If format is VK_FORMAT_UNDEFINED, format_features will be set by external AHB features
locke-lunargd556cc32019-09-17 01:21:23 -06004980 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004981 RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06004982 }
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004983 samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004984}
4985
4986void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
4987 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
4988 const VkAllocationCallbacks *pAllocator,
4989 VkSamplerYcbcrConversion *pYcbcrConversion,
4990 VkResult result) {
4991 if (VK_SUCCESS != result) return;
4992 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
4993}
4994
4995void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
4996 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
4997 const VkAllocationCallbacks *pAllocator,
4998 VkSamplerYcbcrConversion *pYcbcrConversion,
4999 VkResult result) {
5000 if (VK_SUCCESS != result) return;
5001 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5002}
5003
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005004void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
5005 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
5006 RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
5007 }
5008
5009 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005010 ycbcr_state->Destroy();
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005011 samplerYcbcrConversionMap.erase(ycbcr_conversion);
5012}
5013
locke-lunargd556cc32019-09-17 01:21:23 -06005014void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
5015 const VkAllocationCallbacks *pAllocator) {
5016 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005017 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005018}
5019
5020void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
5021 VkSamplerYcbcrConversion ycbcrConversion,
5022 const VkAllocationCallbacks *pAllocator) {
5023 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005024 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005025}
5026
Tony-LunarG977448c2019-12-02 14:52:02 -07005027void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5028 uint32_t queryCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005029 // Do nothing if the feature is not enabled.
Piers Daniell41b8c5d2020-01-10 15:42:00 -07005030 if (!enabled_features.core12.hostQueryReset) return;
locke-lunargd556cc32019-09-17 01:21:23 -06005031
5032 // Do nothing if the query pool has been destroyed.
5033 auto query_pool_state = GetQueryPoolState(queryPool);
5034 if (!query_pool_state) return;
5035
5036 // Reset the state of existing entries.
5037 QueryObject query_obj{queryPool, 0};
5038 const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
5039 for (uint32_t i = 0; i < max_query_count; ++i) {
5040 query_obj.query = firstQuery + i;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02005041 queryToStateMap[query_obj] = QUERYSTATE_RESET;
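        // Performance query pools keep one state entry per counter pass, so each pass's entry is reset as well.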
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005042 if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005043 for (uint32_t pass_index = 0; pass_index < query_pool_state->n_performance_passes; pass_index++) {
5044 query_obj.perf_pass = pass_index;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02005045 queryToStateMap[query_obj] = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005046 }
5047 }
locke-lunargd556cc32019-09-17 01:21:23 -06005048 }
5049}
5050
Tony-LunarG977448c2019-12-02 14:52:02 -07005051void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5052 uint32_t queryCount) {
5053 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
5054}
5055
5056void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5057 uint32_t queryCount) {
5058 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
5059}
5060
locke-lunargd556cc32019-09-17 01:21:23 -06005061void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
5062 const TEMPLATE_STATE *template_state, const void *pData) {
5063 // Translate the templated update into a normal update for validation...
5064 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
5065 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
5066 decoded_update.desc_writes.data(), 0, NULL);
5067}
5068
5069// Update the common AllocateDescriptorSetsData
5070void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005071 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005072 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005073 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005074 if (layout) {
5075 ds_data->layout_nodes[i] = layout;
5076 // Count total descriptors required per type
5077 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5078 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005079 uint32_t type_index = static_cast<uint32_t>(binding_layout->descriptorType);
5080 ds_data->required_descriptors_by_type[type_index] += binding_layout->descriptorCount;
locke-lunargd556cc32019-09-17 01:21:23 -06005081 }
5082 }
5083        // Any unknown layouts will be flagged as errors during the ValidateAllocateDescriptorSets() call
5084 }
5085}
5086
5087// Decrement allocated sets from the pool and insert new sets into set_map
5088void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
5089 const VkDescriptorSet *descriptor_sets,
5090 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
5091 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
5092 // Account for sets and individual descriptors allocated from pool
5093 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
5094 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
5095 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
5096 }
5097
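    // Variable descriptor counts (descriptor indexing) are honored only when the chained struct provides exactly
    // one count per set being allocated; otherwise the count defaults to zero.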
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07005098 const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(p_alloc_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06005099 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
5100
5101 // Create tracking object for each descriptor set; insert into global map and the pool's set.
5102 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
5103 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
5104
Jeff Bolz41a1ced2019-10-11 11:40:49 -05005105 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07005106 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06005107 pool_state->sets.insert(new_ds.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005108 setMap[descriptor_sets[i]] = std::move(new_ds);
5109 }
5110}
5111
5112// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
Jeremy Kniager05631e72020-06-08 14:21:35 -06005113void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
locke-lunarg540b2252020-08-03 13:23:36 -06005114 VkPipelineBindPoint bind_point, const char *function) {
5115 UpdateDrawState(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005116 cb_state->hasDispatchCmd = true;
5117}
5118
locke-lunargd556cc32019-09-17 01:21:23 -06005119// Generic function to handle state update for all CmdDraw* type functions
locke-lunarg540b2252020-08-03 13:23:36 -06005120void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
5121 const char *function) {
5122 UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005123 cb_state->hasDrawCmd = true;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005124
5125 // Update the consumed viewport/scissor count.
5126    uint32_t &used = cb_state->usedViewportScissorCount;
5127 used = std::max(used, cb_state->pipelineStaticViewportCount);
5128 used = std::max(used, cb_state->pipelineStaticScissorCount);
5129 cb_state->usedDynamicViewportCount |= !!(cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET); // !! silences MSVC warn
5130 cb_state->usedDynamicScissorCount |= !!(cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET);
locke-lunargd556cc32019-09-17 01:21:23 -06005131}
5132
5133void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5134 uint32_t firstVertex, uint32_t firstInstance) {
5135 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005136 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
locke-lunargd556cc32019-09-17 01:21:23 -06005137}
5138
5139void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5140 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5141 uint32_t firstInstance) {
5142 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005143 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
locke-lunargd556cc32019-09-17 01:21:23 -06005144}
5145
5146void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5147 uint32_t count, uint32_t stride) {
5148 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5149 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005150 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005151 if (!disabled[command_buffer_state]) {
5152 cb_state->AddChild(buffer_state);
5153 }
locke-lunargd556cc32019-09-17 01:21:23 -06005154}
5155
5156void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5157 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5158 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5159 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005160 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005161 if (!disabled[command_buffer_state]) {
5162 cb_state->AddChild(buffer_state);
5163 }
locke-lunargd556cc32019-09-17 01:21:23 -06005164}
5165
5166void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5167 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005168 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
locke-lunargd556cc32019-09-17 01:21:23 -06005169}
5170
5171void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5172 VkDeviceSize offset) {
5173 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005174 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005175 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005176 if (!disabled[command_buffer_state]) {
5177 cb_state->AddChild(buffer_state);
5178 }
locke-lunargd556cc32019-09-17 01:21:23 -06005179}
5180
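// Shared recorder for the core and KHR vkCmdDrawIndirectCount entry points: both the indirect parameter buffer and
// the count buffer are added as children of the command buffer so their lifetimes can be tracked.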
Tony-LunarG977448c2019-12-02 14:52:02 -07005181void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5182 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
locke-lunarg540b2252020-08-03 13:23:36 -06005183 uint32_t stride, const char *function) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005184 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5185 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5186 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005187 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005188 if (!disabled[command_buffer_state]) {
5189 cb_state->AddChild(buffer_state);
5190 cb_state->AddChild(count_buffer_state);
5191 }
Tony-LunarG977448c2019-12-02 14:52:02 -07005192}
5193
locke-lunargd556cc32019-09-17 01:21:23 -06005194void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5195 VkDeviceSize offset, VkBuffer countBuffer,
5196 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5197 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005198 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5199 "vkCmdDrawIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005200}
5201
5202void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5203 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5204 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005205 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5206 "vkCmdDrawIndirectCount()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005207}
5208
5209void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5210 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
locke-lunarg540b2252020-08-03 13:23:36 -06005211 uint32_t maxDrawCount, uint32_t stride, const char *function) {
locke-lunargd556cc32019-09-17 01:21:23 -06005212 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5213 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5214 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005215 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005216 if (!disabled[command_buffer_state]) {
5217 cb_state->AddChild(buffer_state);
5218 cb_state->AddChild(count_buffer_state);
5219 }
locke-lunargd556cc32019-09-17 01:21:23 -06005220}
5221
5222void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5223 VkDeviceSize offset, VkBuffer countBuffer,
5224 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5225 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005226 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5227 "vkCmdDrawIndexedIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005228}
5229
5230void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
5231 VkDeviceSize offset, VkBuffer countBuffer,
5232 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5233 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005234 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5235 "vkCmdDrawIndexedIndirectCount()");
locke-lunargd556cc32019-09-17 01:21:23 -06005236}
5237
5238void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5239 uint32_t firstTask) {
5240 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005241 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005242}
5243
5244void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5245 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5246 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005247 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5248 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005249 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005250 if (!disabled[command_buffer_state] && buffer_state) {
5251 cb_state->AddChild(buffer_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005252 }
5253}
5254
5255void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5256 VkDeviceSize offset, VkBuffer countBuffer,
5257 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5258 uint32_t stride) {
5259 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5260 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5261 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005262 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5263 "vkCmdDrawMeshTasksIndirectCountNV()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005264 if (!disabled[command_buffer_state]) {
5265 if (buffer_state) {
5266 cb_state->AddChild(buffer_state);
5267 }
5268 if (count_buffer_state) {
5269 cb_state->AddChild(count_buffer_state);
5270 }
locke-lunargd556cc32019-09-17 01:21:23 -06005271 }
5272}
5273
5274void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
5275 const VkAllocationCallbacks *pAllocator,
5276 VkShaderModule *pShaderModule, VkResult result,
5277 void *csm_state_data) {
5278 if (VK_SUCCESS != result) return;
5279 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
5280
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06005281 spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
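    // Modules whose code does not start with the SPIR-V magic number (e.g. GLSL accepted through
    // VK_NV_glsl_shader) get a default-constructed placeholder state with no parsed SPIR-V.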
locke-lunargd556cc32019-09-17 01:21:23 -06005282 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005283 auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
5284 csm_state->unique_shader_id)
5285 : std::make_shared<SHADER_MODULE_STATE>();
sfricke-samsung962cad92021-04-13 00:46:29 -07005286 new_shader_module->SetPushConstantUsedInShader();
locke-lunargd556cc32019-09-17 01:21:23 -06005287 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
5288}
5289
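// Captures the per-stage interface of a pipeline shader stage (entry point, accessible IDs, descriptor usage) and
// folds the per-binding requirements into the pipeline's active_slots for later descriptor validation.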
5290void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06005291 PipelineStageState *stage_state) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005292 // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
locke-lunargde3f0fa2020-09-10 11:55:31 -06005293 stage_state->entry_point_name = pStage->pName;
5294 stage_state->shader_state = GetShared<SHADER_MODULE_STATE>(pStage->module);
5295 auto module = stage_state->shader_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06005296 if (!module->has_valid_spirv) return;
5297
5298 // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
sfricke-samsung962cad92021-04-13 00:46:29 -07005299 auto entrypoint = module->FindEntrypoint(pStage->pName, pStage->stage);
locke-lunargd556cc32019-09-17 01:21:23 -06005300 if (entrypoint == module->end()) return;
5301
locke-lunarg654e3692020-06-04 17:19:15 -06005302 stage_state->stage_flag = pStage->stage;
5303
locke-lunargd556cc32019-09-17 01:21:23 -06005304 // Mark accessible ids
sfricke-samsung962cad92021-04-13 00:46:29 -07005305 stage_state->accessible_ids = module->MarkAccessibleIds(entrypoint);
5306 module->ProcessExecutionModes(entrypoint, pipeline);
locke-lunargd556cc32019-09-17 01:21:23 -06005307
sfricke-samsung962cad92021-04-13 00:46:29 -07005308 stage_state->descriptor_uses = module->CollectInterfaceByDescriptorSlot(
5309 stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
locke-lunargd556cc32019-09-17 01:21:23 -06005310 // Capture descriptor uses for the pipeline
locke-lunarg36045992020-08-20 16:54:37 -06005311 for (const auto &use : stage_state->descriptor_uses) {
locke-lunargd556cc32019-09-17 01:21:23 -06005312        // While validating shaders, capture which slots are used by the pipeline
John Zulauf649edd52019-10-02 14:39:41 -06005313 const uint32_t slot = use.first.first;
locke-lunarg351c9d82020-10-23 14:43:21 -06005314 pipeline->active_slots[slot][use.first.second].is_writable |= use.second.is_writable;
locke-lunarg36045992020-08-20 16:54:37 -06005315 auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
sfricke-samsung962cad92021-04-13 00:46:29 -07005316 reqs = descriptor_req(reqs | module->DescriptorTypeToReqs(use.second.type_id));
locke-lunarg25b6c352020-08-06 17:44:18 -06005317 if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
locke-lunarg12d20992020-09-21 12:46:49 -06005318 if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
locke-lunargae2a43c2020-09-22 17:21:57 -06005319 if (use.second.is_sampler_bias_offset) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET);
locke-lunarg12d20992020-09-21 12:46:49 -06005320
John Zulauf649edd52019-10-02 14:39:41 -06005321 pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
locke-lunarg36045992020-08-20 16:54:37 -06005322 if (use.second.samplers_used_by_image.size()) {
locke-lunarg654a9052020-10-13 16:28:42 -06005323 auto &samplers_used_by_image = pipeline->active_slots[slot][use.first.second].samplers_used_by_image;
5324 if (use.second.samplers_used_by_image.size() > samplers_used_by_image.size()) {
5325 samplers_used_by_image.resize(use.second.samplers_used_by_image.size());
5326 }
locke-lunarg654a9052020-10-13 16:28:42 -06005327 uint32_t image_index = 0;
5328 for (const auto &samplers : use.second.samplers_used_by_image) {
5329 for (const auto &sampler : samplers) {
locke-lunargb8be8222020-10-20 00:34:37 -06005330 samplers_used_by_image[image_index].emplace(sampler, nullptr);
locke-lunarg654a9052020-10-13 16:28:42 -06005331 }
5332 ++image_index;
5333 }
locke-lunarg36045992020-08-20 16:54:37 -06005334 }
locke-lunargd556cc32019-09-17 01:21:23 -06005335 }
locke-lunarg78486832020-09-09 19:39:42 -06005336
locke-lunarg96dc9632020-06-10 17:22:18 -06005337 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
sfricke-samsung962cad92021-04-13 00:46:29 -07005338 pipeline->fragmentShader_writable_output_location_list = module->CollectWritableOutputLocationinFS(*pStage);
locke-lunarg96dc9632020-06-10 17:22:18 -06005339 }
locke-lunargd556cc32019-09-17 01:21:23 -06005340}
5341
sfricke-samsung70ad9ce2021-04-04 00:53:54 -07005342// Discussed in detail in https://github.com/KhronosGroup/Vulkan-Docs/issues/1081
5343// Internal discussion and CTS were written to prove that this is not called after an incompatible vkCmdBindPipeline
5344// "Binding a pipeline with a layout that is not compatible with the push constant layout does not disturb the push constant values"
5345//
5346// vkCmdBindDescriptorSets has nothing to do with push constants, so there is no need to call this after it either
5347//
5348// Part of this assumes the app will have a properly compatible layout at draw/dispatch/traceRays/etc. time, or else other VUs will be triggered
locke-lunargd556cc32019-09-17 01:21:23 -06005349void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
5350 if (cb_state == nullptr) {
5351 return;
5352 }
5353
5354 const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
5355 if (pipeline_layout_state == nullptr) {
5356 return;
5357 }
5358
5359 if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
5360 cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
5361 cb_state->push_constant_data.clear();
locke-lunargde3f0fa2020-09-10 11:55:31 -06005362 cb_state->push_constant_data_update.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06005363 uint32_t size_needed = 0;
John Zulauf79f06582021-02-27 18:38:39 -07005364 for (const auto &push_constant_range : *cb_state->push_constant_data_ranges) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06005365 auto size = push_constant_range.offset + push_constant_range.size;
5366 size_needed = std::max(size_needed, size);
5367
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005368 auto stage_flags = push_constant_range.stageFlags;
locke-lunargde3f0fa2020-09-10 11:55:31 -06005369 uint32_t bit_shift = 0;
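            // Walk each stage bit in the range. The per-stage shadow vectors mark bytes below the range offset as
            // PC_Byte_Not_Set and bytes inside the range as PC_Byte_Not_Updated until a push constant update overwrites them.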
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005370 while (stage_flags) {
5371 if (stage_flags & 1) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06005372 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
5373 const auto it = cb_state->push_constant_data_update.find(flag);
5374
5375 if (it != cb_state->push_constant_data_update.end()) {
5376 if (it->second.size() < push_constant_range.offset) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005377 it->second.resize(push_constant_range.offset, PC_Byte_Not_Set);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005378 }
5379 if (it->second.size() < size) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005380 it->second.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005381 }
5382 } else {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005383 std::vector<uint8_t> bytes;
5384 bytes.resize(push_constant_range.offset, PC_Byte_Not_Set);
5385 bytes.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005386 cb_state->push_constant_data_update[flag] = bytes;
5387 }
5388 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005389 stage_flags = stage_flags >> 1;
locke-lunargde3f0fa2020-09-10 11:55:31 -06005390 ++bit_shift;
5391 }
locke-lunargd556cc32019-09-17 01:21:23 -06005392 }
5393 cb_state->push_constant_data.resize(size_needed, 0);
5394 }
5395}
John Zulauf22b0fbe2019-10-15 06:26:16 -06005396
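// Creates an IMAGE_STATE for each swapchain image that mirrors the swapchain create info. Every image bound to the
// same swapchain slot shares one fake memory address so sync validation can treat the aliases consistently.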
void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            SWAPCHAIN_IMAGE &swapchain_image = swapchain_state->images[i];
            if (swapchain_image.image_state) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            auto image_ci = LvlInitStruct<VkImageCreateInfo>();
            image_ci.pNext = LvlFindInChain<VkImageFormatListCreateInfo>(swapchain_state->createInfo.pNext);
            image_ci.flags = 0;  // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR) {
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            }
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR) {
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            }
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT);
            }

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
            auto *image_state = imageMap[pSwapchainImages[i]].get();
            assert(image_state);
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            image_state->unprotected = ((image_ci.flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);

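            // NOTE: All IMAGE_STATEs bound to the same swapchain image index alias the same presentable image, so they
            // share a single fake base address for sync validation: the first binding allocates it, later bindings
            // reuse it and record the aliasing relationship.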
            // Since swapchain images can't be linear, we can create an encoder here, and SyncVal needs a fake_base_address
            image_state->fragment_encoder = std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(
                new subresource_adapter::ImageRangeEncoder(*image_state));

            if (swapchain_image.bound_images.empty()) {
                // First time "bind" allocates
                image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
            } else {
                // All others reuse
                image_state->swapchain_fake_address = (*swapchain_image.bound_images.cbegin())->swapchain_fake_address;
                // Since there are others, need to update the aliasing information
                image_state->AddAliasingImage(swapchain_image.bound_images);
            }

            swapchain_image.image_state = image_state;  // Don't move, it's already a reference to the imageMap
            swapchain_image.bound_images.emplace(image_state);

            AddImageStateProps(*image_state, device, physical_device);
        }
    }

    if (*pSwapchainImageCount) {
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}

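// NOTE: A copy marks the destination acceleration structure as built with the source's build info; both structures are
// linked to the command buffer unless fine-grained command buffer state tracking is disabled.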
void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
                                                                           const VkCopyAccelerationStructureInfoKHR *pInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfo->src);
        ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfo->dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info_khr = src_as_state->build_info_khr;
            if (!disabled[command_buffer_state]) {
                cb_state->AddChild(dst_as_state);
                cb_state->AddChild(src_as_state);
            }
        }
    }
}

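// The PreCallRecordCmdSet*EXT entry points below record that a piece of pipeline state was supplied dynamically:
// the corresponding CBSTATUS_* bit is set in cb_state->status and cleared in static_status (state that would
// otherwise come from the bound pipeline).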
void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_CULL_MODE_SET;
    cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_FRONT_FACE_SET;
    cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
                                                                     VkPrimitiveTopology primitiveTopology) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->primitiveTopology = primitiveTopology;
    cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
    cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
}

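// NOTE: The *WithCount commands additionally track which viewport/scissor slots currently hold valid dynamic values:
// bits [0, count) are set in the *WithCountMask and cleared from the corresponding trashed mask, and the viewport
// values themselves are saved in dynamicViewports.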
void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
                                                                     const VkViewport *pViewports) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    uint32_t bits = (1u << viewportCount) - 1u;
    cb_state->viewportWithCountMask |= bits;
    cb_state->trashedViewportMask &= ~bits;
    cb_state->viewportWithCountCount = viewportCount;
    cb_state->trashedViewportCount = false;
    cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
    cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;

    cb_state->dynamicViewports.resize(std::max(size_t(viewportCount), cb_state->dynamicViewports.size()));
    for (size_t i = 0; i < viewportCount; ++i) {
        cb_state->dynamicViewports[i] = pViewports[i];
    }
}

void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
                                                                    const VkRect2D *pScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    uint32_t bits = (1u << scissorCount) - 1u;
    cb_state->scissorWithCountMask |= bits;
    cb_state->trashedScissorMask &= ~bits;
    cb_state->scissorWithCountCount = scissorCount;
    cb_state->trashedScissorCount = false;
    cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
    cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
}

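// NOTE: When the optional pSizes/pStrides arrays are not provided, the binding size defaults to VK_WHOLE_SIZE and the
// stride to 0; bound buffers are linked to the command buffer unless fine-grained command buffer state tracking is
// disabled.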
void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                                   uint32_t bindingCount, const VkBuffer *pBuffers,
                                                                   const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
                                                                   const VkDeviceSize *pStrides) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (pStrides) {
        cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
        cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
    }

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
        vertex_buffer_binding.offset = pOffsets[i];
        vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
        vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
        // Add binding for this vertex buffer to this command buffer
        if (!disabled[command_buffer_state] && pBuffers[i]) {
            cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
        }
    }
}

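// The remaining dynamic-state recorders below only flip the status/static_status bits; the parameter values
// themselves are not retained by the state tracker.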
void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
                                                                         VkBool32 depthBoundsTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                             VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
                                                             VkCompareOp compareOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_OP_SET;
    cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
                                                                    uint32_t discardRectangleCount,
                                                                    const VkRect2D *pDiscardRectangles) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DISCARD_RECTANGLE_SET;
    cb_state->static_status &= ~CBSTATUS_DISCARD_RECTANGLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
                                                                   const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_SAMPLE_LOCATIONS_SET;
    cb_state->static_status &= ~CBSTATUS_SAMPLE_LOCATIONS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer,
                                                                    VkCoarseSampleOrderTypeNV sampleOrderType,
                                                                    uint32_t customSampleOrderCount,
                                                                    const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_COARSE_SAMPLE_ORDER_SET;
    cb_state->static_status &= ~CBSTATUS_COARSE_SAMPLE_ORDER_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer, uint32_t patchControlPoints) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_PATCH_CONTROL_POINTS_SET;
    cb_state->static_status &= ~CBSTATUS_PATCH_CONTROL_POINTS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LOGIC_OP_SET;
    cb_state->static_status &= ~CBSTATUS_LOGIC_OP_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetRasterizerDiscardEnableEXT(VkCommandBuffer commandBuffer,
                                                                           VkBool32 rasterizerDiscardEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBiasEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BIAS_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetPrimitiveRestartEnableEXT(VkCommandBuffer commandBuffer,
                                                                          VkBool32 primitiveRestartEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
}

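// NOTE: vkCmdSetVertexInputEXT supplies complete vertex input state, including per-binding strides, so it marks both
// the vertex input and the vertex-input-binding-stride state as dynamically set.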
void ValidationStateTracker::PreCallRecordCmdSetVertexInputEXT(
    VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount,
    const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount,
    const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET;
    cb_state->static_status &= ~(CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET);
}