blob: 18cf9d80d40bab0b9869953c677c731a1c81cf6e [file] [log] [blame]
/* Copyright (c) 2015-2021 The Khronos Group Inc.
 * Copyright (c) 2015-2021 Valve Corporation
 * Copyright (c) 2015-2021 LunarG, Inc.
 * Copyright (C) 2015-2021 Google Inc.
 * Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Shannon McPherson <shannon@lunarg.com>
 * Author: Tobias Hector <tobias.hector@amd.com>
 */
24
#include <algorithm>
#include <cmath>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_logging.h"
#include "vk_layer_utils.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "cmd_buffer_state.h"
#include "render_pass_state.h"
#include "shader_validation.h"
#include "state_tracker.h"
#include "sync_utils.h"
locke-lunarg4189aa22020-10-21 00:23:48 -060041
Mark Lobodzinskib4ab6ac2020-04-02 13:12:06 -060042void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
43 if (add_obj) {
44 instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
45 // Call base class
46 ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
47 }
48}
49
John Zulauf2bc1fde2020-04-24 15:09:51 -060050// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
51// attachments won't persist past the API entry point exit.
Jeremy Gebben88f58142021-06-01 10:07:52 -060052static std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
53 const FRAMEBUFFER_STATE &fb_state) {
John Zulauf2bc1fde2020-04-24 15:09:51 -060054 const VkImageView *attachments = fb_state.createInfo.pAttachments;
55 uint32_t count = fb_state.createInfo.attachmentCount;
56 if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -070057 const auto *framebuffer_attachments = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
John Zulauf2bc1fde2020-04-24 15:09:51 -060058 if (framebuffer_attachments) {
59 attachments = framebuffer_attachments->pAttachments;
60 count = framebuffer_attachments->attachmentCount;
61 }
62 }
63 return std::make_pair(count, attachments);
64}
65
John Zulauf64ffe552021-02-06 10:25:07 -070066template <typename ImageViewPointer, typename Get>
67std::vector<ImageViewPointer> GetAttachmentViewsImpl(const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state,
68 const Get &get_fn) {
69 std::vector<ImageViewPointer> views;
John Zulauf2bc1fde2020-04-24 15:09:51 -060070
71 const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
72 const auto attachment_count = count_attachment.first;
73 const auto *attachments = count_attachment.second;
74 views.resize(attachment_count, nullptr);
75 for (uint32_t i = 0; i < attachment_count; i++) {
76 if (attachments[i] != VK_NULL_HANDLE) {
John Zulauf64ffe552021-02-06 10:25:07 -070077 views[i] = get_fn(attachments[i]);
John Zulauf2bc1fde2020-04-24 15:09:51 -060078 }
79 }
80 return views;
81}
82
John Zulauf64ffe552021-02-06 10:25:07 -070083std::vector<std::shared_ptr<const IMAGE_VIEW_STATE>> ValidationStateTracker::GetSharedAttachmentViews(
84 const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state) const {
85 auto get_fn = [this](VkImageView handle) { return this->GetShared<IMAGE_VIEW_STATE>(handle); };
86 return GetAttachmentViewsImpl<std::shared_ptr<const IMAGE_VIEW_STATE>>(rp_begin, fb_state, get_fn);
87}
88
locke-lunargd556cc32019-09-17 01:21:23 -060089#ifdef VK_USE_PLATFORM_ANDROID_KHR
90// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
91// This could also move into a seperate core_validation_android.cpp file... ?
92
93void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -070094 const VkExternalFormatANDROID *ext_fmt_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -060095 if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
96 is_node->has_ahb_format = true;
97 is_node->ahb_format = ext_fmt_android->externalFormat;
Spencer Fricke6bba8c72020-04-06 07:41:21 -070098 // VUID 01894 will catch if not found in map
99 auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
100 if (it != ahb_ext_formats_map.end()) {
101 is_node->format_features = it->second;
102 }
locke-lunargd556cc32019-09-17 01:21:23 -0600103 }
104}
105
106void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700107 VkSamplerYcbcrConversion ycbcr_conversion,
108 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700109 const VkExternalFormatANDROID *ext_format_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600110 if (ext_format_android && (0 != ext_format_android->externalFormat)) {
111 ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700112 // VUID 01894 will catch if not found in map
113 auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
114 if (it != ahb_ext_formats_map.end()) {
115 ycbcr_state->format_features = it->second;
116 }
locke-lunargd556cc32019-09-17 01:21:23 -0600117 }
118};
119
120void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
121 ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
122};
123
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700124void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
125 VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
126 if (VK_SUCCESS != result) return;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700127 auto ahb_format_props = LvlFindInChain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700128 if (ahb_format_props) {
Jeremy Gebbenfc6f8152021-03-18 16:58:55 -0600129 ahb_ext_formats_map.emplace(ahb_format_props->externalFormat, ahb_format_props->formatFeatures);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700130 }
131}
132
locke-lunargd556cc32019-09-17 01:21:23 -0600133#else
134
135void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
136
137void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700138 VkSamplerYcbcrConversion ycbcr_conversion,
139 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state){};
locke-lunargd556cc32019-09-17 01:21:23 -0600140
141void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
142
143#endif // VK_USE_PLATFORM_ANDROID_KHR
144
Petr Kraus44f1c482020-04-25 20:09:25 +0200145void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
146 // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
147 // if format is AHB external format then the features are already set
148 if (image_state.has_ahb_format == false) {
149 const VkImageTiling image_tiling = image_state.createInfo.tiling;
150 const VkFormat image_format = image_state.createInfo.format;
151 if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
152 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
153 VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -0600154 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image(), &drm_format_properties);
Petr Kraus44f1c482020-04-25 20:09:25 +0200155
156 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
157 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
158 nullptr};
159 format_properties_2.pNext = (void *)&drm_properties_list;
160 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Lionel Landwerlin09351a72020-06-22 18:15:59 +0300161 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
162 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
163 drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
164 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Petr Kraus44f1c482020-04-25 20:09:25 +0200165
166 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300167 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
168 drm_format_properties.drmFormatModifier) {
169 image_state.format_features =
Petr Kraus44f1c482020-04-25 20:09:25 +0200170 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300171 break;
Petr Kraus44f1c482020-04-25 20:09:25 +0200172 }
173 }
174 } else {
175 VkFormatProperties format_properties;
176 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
177 image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
178 : format_properties.optimalTilingFeatures;
179 }
180 }
181}
182
locke-lunargd556cc32019-09-17 01:21:23 -0600183void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
184 const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
185 if (VK_SUCCESS != result) return;
locke-lunarg296a3c92020-03-25 01:04:29 -0600186 auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
sfricke-samsung71bc6572020-04-29 15:49:43 -0700187 is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
locke-lunargd556cc32019-09-17 01:21:23 -0600188 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
189 RecordCreateImageANDROID(pCreateInfo, is_node.get());
190 }
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700191 const auto swapchain_info = LvlFindInChain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600192 if (swapchain_info) {
193 is_node->create_from_swapchain = swapchain_info->swapchain;
194 }
195
locke-lunargd556cc32019-09-17 01:21:23 -0600196 // Record the memory requirements in case they won't be queried
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700197 // External AHB memory can't be queried until after memory is bound
Jeremy Gebben6fbf8242021-06-21 09:14:46 -0600198 if (is_node->IsExternalAHB() == false) {
sfricke-samsung71bc6572020-04-29 15:49:43 -0700199 if (is_node->disjoint == false) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -0600200 DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements[0]);
sfricke-samsungd7ea5de2020-04-08 09:19:18 -0700201 } else {
202 uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
203 VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
204 VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
205 VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
206 mem_req_info2.pNext = &image_plane_req;
207 mem_req_info2.image = *pImage;
208
209 assert(plane_count != 0); // assumes each format has at least first plane
210 image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
211 DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
Jeremy Gebben6fbf8242021-06-21 09:14:46 -0600212 is_node->requirements[0] = mem_reqs2.memoryRequirements;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -0700213
214 if (plane_count >= 2) {
215 image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
216 DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
Jeremy Gebben6fbf8242021-06-21 09:14:46 -0600217 is_node->requirements[1] = mem_reqs2.memoryRequirements;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -0700218 }
219 if (plane_count >= 3) {
220 image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
221 DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
Jeremy Gebben6fbf8242021-06-21 09:14:46 -0600222 is_node->requirements[2] = mem_reqs2.memoryRequirements;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -0700223 }
224 }
locke-lunargd556cc32019-09-17 01:21:23 -0600225 }
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700226
Petr Kraus44f1c482020-04-25 20:09:25 +0200227 AddImageStateProps(*is_node, device, physical_device);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700228
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700229 imageMap.emplace(*pImage, std::move(is_node));
locke-lunargd556cc32019-09-17 01:21:23 -0600230}
231
232void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
233 if (!image) return;
234 IMAGE_STATE *image_state = GetImageState(image);
locke-lunargd556cc32019-09-17 01:21:23 -0600235 // Clean up memory mapping, bindings and range references for image
locke-lunargd556cc32019-09-17 01:21:23 -0600236 if (image_state->bind_swapchain) {
237 auto swapchain = GetSwapchainState(image_state->bind_swapchain);
238 if (swapchain) {
John Zulaufd13b38e2021-03-05 08:17:38 -0700239 swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600240 }
241 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600242 image_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -0600243 imageMap.erase(image);
244}
245
246void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
247 VkImageLayout imageLayout, const VkClearColorValue *pColor,
248 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600249
250 if (disabled[command_buffer_state]) return;
251
locke-lunargd556cc32019-09-17 01:21:23 -0600252 auto cb_node = GetCBState(commandBuffer);
253 auto image_state = GetImageState(image);
254 if (cb_node && image_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600255 cb_node->AddChild(image_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600256 }
257}
258
259void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
260 VkImageLayout imageLayout,
261 const VkClearDepthStencilValue *pDepthStencil,
262 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600263 if (disabled[command_buffer_state]) return;
264
locke-lunargd556cc32019-09-17 01:21:23 -0600265 auto cb_node = GetCBState(commandBuffer);
266 auto image_state = GetImageState(image);
267 if (cb_node && image_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600268 cb_node->AddChild(image_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600269 }
270}
271
272void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
273 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
274 uint32_t regionCount, const VkImageCopy *pRegions) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600275 if (disabled[command_buffer_state]) return;
276
locke-lunargd556cc32019-09-17 01:21:23 -0600277 auto cb_node = GetCBState(commandBuffer);
278 auto src_image_state = GetImageState(srcImage);
279 auto dst_image_state = GetImageState(dstImage);
280
281 // Update bindings between images and cmd buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600282 cb_node->AddChild(src_image_state);
283 cb_node->AddChild(dst_image_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600284}
285
Jeff Leger178b1e52020-10-05 12:22:23 -0400286void ValidationStateTracker::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
287 const VkCopyImageInfo2KHR *pCopyImageInfo) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600288 if (disabled[command_buffer_state]) return;
289
Jeff Leger178b1e52020-10-05 12:22:23 -0400290 auto cb_node = GetCBState(commandBuffer);
291 auto src_image_state = GetImageState(pCopyImageInfo->srcImage);
292 auto dst_image_state = GetImageState(pCopyImageInfo->dstImage);
293
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600294 cb_node->AddChild(src_image_state);
295 cb_node->AddChild(dst_image_state);
Jeff Leger178b1e52020-10-05 12:22:23 -0400296}
297
locke-lunargd556cc32019-09-17 01:21:23 -0600298void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
299 VkImageLayout srcImageLayout, VkImage dstImage,
300 VkImageLayout dstImageLayout, uint32_t regionCount,
301 const VkImageResolve *pRegions) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600302 if (disabled[command_buffer_state]) return;
303
locke-lunargd556cc32019-09-17 01:21:23 -0600304 auto cb_node = GetCBState(commandBuffer);
305 auto src_image_state = GetImageState(srcImage);
306 auto dst_image_state = GetImageState(dstImage);
307
308 // Update bindings between images and cmd buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600309 cb_node->AddChild(src_image_state);
310 cb_node->AddChild(dst_image_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600311}
312
Jeff Leger178b1e52020-10-05 12:22:23 -0400313void ValidationStateTracker::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
314 const VkResolveImageInfo2KHR *pResolveImageInfo) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600315 if (disabled[command_buffer_state]) return;
316
Jeff Leger178b1e52020-10-05 12:22:23 -0400317 auto cb_node = GetCBState(commandBuffer);
318 auto src_image_state = GetImageState(pResolveImageInfo->srcImage);
319 auto dst_image_state = GetImageState(pResolveImageInfo->dstImage);
320
321 // Update bindings between images and cmd buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600322 cb_node->AddChild(src_image_state);
323 cb_node->AddChild(dst_image_state);
Jeff Leger178b1e52020-10-05 12:22:23 -0400324}
325
locke-lunargd556cc32019-09-17 01:21:23 -0600326void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
327 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
328 uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600329 if (disabled[command_buffer_state]) return;
330
locke-lunargd556cc32019-09-17 01:21:23 -0600331 auto cb_node = GetCBState(commandBuffer);
332 auto src_image_state = GetImageState(srcImage);
333 auto dst_image_state = GetImageState(dstImage);
334
335 // Update bindings between images and cmd buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600336 cb_node->AddChild(src_image_state);
337 cb_node->AddChild(dst_image_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600338}
339
Jeff Leger178b1e52020-10-05 12:22:23 -0400340void ValidationStateTracker::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
341 const VkBlitImageInfo2KHR *pBlitImageInfo) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600342 if (disabled[command_buffer_state]) return;
343
Jeff Leger178b1e52020-10-05 12:22:23 -0400344 auto cb_node = GetCBState(commandBuffer);
345 auto src_image_state = GetImageState(pBlitImageInfo->srcImage);
346 auto dst_image_state = GetImageState(pBlitImageInfo->dstImage);
347
348 // Update bindings between images and cmd buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600349 cb_node->AddChild(src_image_state);
350 cb_node->AddChild(dst_image_state);
Jeff Leger178b1e52020-10-05 12:22:23 -0400351}
352
locke-lunargd556cc32019-09-17 01:21:23 -0600353void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
354 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
355 VkResult result) {
356 if (result != VK_SUCCESS) return;
357 // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500358 auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -0600359
360 // Get a set of requirements in the case the app does not
sfricke-samsungad90e722020-07-08 20:54:24 -0700361 DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);
locke-lunargd556cc32019-09-17 01:21:23 -0600362
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700363 bufferMap.emplace(*pBuffer, std::move(buffer_state));
locke-lunargd556cc32019-09-17 01:21:23 -0600364}
365
366void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
367 const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
368 VkResult result) {
369 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500370 auto buffer_state = GetBufferShared(pCreateInfo->buffer);
locke-lunarg25b6c352020-08-06 17:44:18 -0600371 auto buffer_view_state = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
372
373 VkFormatProperties format_properties;
374 DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);
375 buffer_view_state->format_features = format_properties.bufferFeatures;
376
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700377 bufferViewMap.emplace(*pView, std::move(buffer_view_state));
locke-lunargd556cc32019-09-17 01:21:23 -0600378}
379
380void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
381 const VkAllocationCallbacks *pAllocator, VkImageView *pView,
382 VkResult result) {
383 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500384 auto image_state = GetImageShared(pCreateInfo->image);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700385 auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
386
387 // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
388 const VkImageTiling image_tiling = image_state->createInfo.tiling;
389 const VkFormat image_view_format = pCreateInfo->format;
390 if (image_state->has_ahb_format == true) {
391 // The ImageView uses same Image's format feature since they share same AHB
392 image_view_state->format_features = image_state->format_features;
393 } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
394 // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
395 assert(device_extensions.vk_ext_image_drm_format_modifier);
396 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
397 nullptr};
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -0600398 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image(), &drm_format_properties);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700399
400 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
401 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
402 nullptr};
403 format_properties_2.pNext = (void *)&drm_properties_list;
nyorain38a9d232021-03-06 13:06:12 +0100404
405 // First call is to get the number of modifiers compatible with the queried format
406 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
407
408 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
409 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
410 drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
411
412 // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
413 // compatible with the queried format
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700414 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
415
416 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300417 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700418 image_view_state->format_features |=
419 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300420 break;
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700421 }
422 }
423 } else {
424 VkFormatProperties format_properties;
425 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
426 image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
427 : format_properties.optimalTilingFeatures;
428 }
429
sfricke-samsungd23f6e62021-01-17 09:05:47 -0800430 auto usage_create_info = LvlFindInChain<VkImageViewUsageCreateInfo>(pCreateInfo->pNext);
431 image_view_state->inherited_usage = (usage_create_info) ? usage_create_info->usage : image_state->createInfo.usage;
432
locke-lunarg9939d4b2020-10-26 20:11:08 -0600433 // filter_cubic_props is used in CmdDraw validation. But it takes a lot of performance if it does in CmdDraw.
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -0700434 image_view_state->filter_cubic_props = LvlInitStruct<VkFilterCubicImageViewImageFormatPropertiesEXT>();
locke-lunarg9939d4b2020-10-26 20:11:08 -0600435 if (IsExtEnabled(device_extensions.vk_ext_filter_cubic)) {
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -0700436 auto imageview_format_info = LvlInitStruct<VkPhysicalDeviceImageViewImageFormatInfoEXT>();
locke-lunarg9939d4b2020-10-26 20:11:08 -0600437 imageview_format_info.imageViewType = pCreateInfo->viewType;
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -0700438 auto image_format_info = LvlInitStruct<VkPhysicalDeviceImageFormatInfo2>(&imageview_format_info);
locke-lunarg9939d4b2020-10-26 20:11:08 -0600439 image_format_info.type = image_state->createInfo.imageType;
440 image_format_info.format = image_state->createInfo.format;
441 image_format_info.tiling = image_state->createInfo.tiling;
sfricke-samsungd23f6e62021-01-17 09:05:47 -0800442 image_format_info.usage = image_view_state->inherited_usage;
locke-lunarg9939d4b2020-10-26 20:11:08 -0600443 image_format_info.flags = image_state->createInfo.flags;
444
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -0700445 auto image_format_properties = LvlInitStruct<VkImageFormatProperties2>(&image_view_state->filter_cubic_props);
locke-lunarg9939d4b2020-10-26 20:11:08 -0600446
447 DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &image_format_info, &image_format_properties);
448 }
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700449 imageViewMap.emplace(*pView, std::move(image_view_state));
locke-lunargd556cc32019-09-17 01:21:23 -0600450}
451
452void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
453 uint32_t regionCount, const VkBufferCopy *pRegions) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600454 if (disabled[command_buffer_state]) return;
455
locke-lunargd556cc32019-09-17 01:21:23 -0600456 auto cb_node = GetCBState(commandBuffer);
457 auto src_buffer_state = GetBufferState(srcBuffer);
458 auto dst_buffer_state = GetBufferState(dstBuffer);
459
460 // Update bindings between buffers and cmd buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600461 cb_node->AddChild(src_buffer_state);
462 cb_node->AddChild(dst_buffer_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600463}
464
Jeff Leger178b1e52020-10-05 12:22:23 -0400465void ValidationStateTracker::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
466 const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600467 if (disabled[command_buffer_state]) return;
468
Jeff Leger178b1e52020-10-05 12:22:23 -0400469 auto cb_node = GetCBState(commandBuffer);
470 auto src_buffer_state = GetBufferState(pCopyBufferInfos->srcBuffer);
471 auto dst_buffer_state = GetBufferState(pCopyBufferInfos->dstBuffer);
472
473 // Update bindings between buffers and cmd buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600474 cb_node->AddChild(src_buffer_state);
475 cb_node->AddChild(dst_buffer_state);
Jeff Leger178b1e52020-10-05 12:22:23 -0400476}
477
locke-lunargd556cc32019-09-17 01:21:23 -0600478void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
479 const VkAllocationCallbacks *pAllocator) {
480 IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
481 if (!image_view_state) return;
locke-lunargd556cc32019-09-17 01:21:23 -0600482
483 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600484 image_view_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -0600485 imageViewMap.erase(imageView);
486}
487
488void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
489 if (!buffer) return;
490 auto buffer_state = GetBufferState(buffer);
locke-lunargd556cc32019-09-17 01:21:23 -0600491
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600492 buffer_state->Destroy();
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -0600493 bufferMap.erase(buffer_state->buffer());
locke-lunargd556cc32019-09-17 01:21:23 -0600494}
495
496void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
497 const VkAllocationCallbacks *pAllocator) {
498 if (!bufferView) return;
499 auto buffer_view_state = GetBufferViewState(bufferView);
locke-lunargd556cc32019-09-17 01:21:23 -0600500
501 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600502 buffer_view_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -0600503 bufferViewMap.erase(bufferView);
504}
505
506void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
507 VkDeviceSize size, uint32_t data) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600508 if (disabled[command_buffer_state]) return;
509
locke-lunargd556cc32019-09-17 01:21:23 -0600510 auto cb_node = GetCBState(commandBuffer);
511 auto buffer_state = GetBufferState(dstBuffer);
512 // Update bindings between buffer and cmd buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600513 cb_node->AddChild(buffer_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600514}
515
516void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
517 VkImageLayout srcImageLayout, VkBuffer dstBuffer,
518 uint32_t regionCount, const VkBufferImageCopy *pRegions) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600519 if (disabled[command_buffer_state]) return;
520
locke-lunargd556cc32019-09-17 01:21:23 -0600521 auto cb_node = GetCBState(commandBuffer);
522 auto src_image_state = GetImageState(srcImage);
523 auto dst_buffer_state = GetBufferState(dstBuffer);
524
525 // Update bindings between buffer/image and cmd buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600526 cb_node->AddChild(src_image_state);
527 cb_node->AddChild(dst_buffer_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600528}
529
Jeff Leger178b1e52020-10-05 12:22:23 -0400530void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
531 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600532 if (disabled[command_buffer_state]) return;
533
Jeff Leger178b1e52020-10-05 12:22:23 -0400534 auto cb_node = GetCBState(commandBuffer);
535 auto src_image_state = GetImageState(pCopyImageToBufferInfo->srcImage);
536 auto dst_buffer_state = GetBufferState(pCopyImageToBufferInfo->dstBuffer);
537
538 // Update bindings between buffer/image and cmd buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600539 cb_node->AddChild(src_image_state);
540 cb_node->AddChild(dst_buffer_state);
Jeff Leger178b1e52020-10-05 12:22:23 -0400541}
542
locke-lunargd556cc32019-09-17 01:21:23 -0600543void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
544 VkImageLayout dstImageLayout, uint32_t regionCount,
545 const VkBufferImageCopy *pRegions) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600546 if (disabled[command_buffer_state]) return;
547
locke-lunargd556cc32019-09-17 01:21:23 -0600548 auto cb_node = GetCBState(commandBuffer);
549 auto src_buffer_state = GetBufferState(srcBuffer);
550 auto dst_image_state = GetImageState(dstImage);
551
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600552 cb_node->AddChild(src_buffer_state);
553 cb_node->AddChild(dst_image_state);
locke-lunargd556cc32019-09-17 01:21:23 -0600554}
555
Jeff Leger178b1e52020-10-05 12:22:23 -0400556void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
557 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600558
559 if (disabled[command_buffer_state]) return;
560
Jeff Leger178b1e52020-10-05 12:22:23 -0400561 auto cb_node = GetCBState(commandBuffer);
562 auto src_buffer_state = GetBufferState(pCopyBufferToImageInfo->srcBuffer);
563 auto dst_image_state = GetImageState(pCopyBufferToImageInfo->dstImage);
564
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600565 cb_node->AddChild(src_buffer_state);
566 cb_node->AddChild(dst_image_state);
Jeff Leger178b1e52020-10-05 12:22:23 -0400567}
568
Jeremy Gebben159b3cc2021-06-03 09:09:03 -0600569QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
570 auto it = queueMap.find(queue);
571 if (it == queueMap.end()) {
572 return nullptr;
573 }
574 return &it->second;
locke-lunargd556cc32019-09-17 01:21:23 -0600575}
576
Jeremy Gebben5570abe2021-05-16 18:35:13 -0600577const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
578 auto it = queueMap.find(queue);
579 if (it == queueMap.cend()) {
580 return nullptr;
581 }
582 return &it->second;
locke-lunargd556cc32019-09-17 01:21:23 -0600583}
584
Jeremy Gebbencbf22862021-03-03 12:01:22 -0700585void ValidationStateTracker::RemoveAliasingImages(const layer_data::unordered_set<IMAGE_STATE *> &bound_images) {
locke-lunargd556cc32019-09-17 01:21:23 -0600586 // This is one way clear. Because the bound_images include cross references, the one way clear loop could clear the whole
587 // reference. It doesn't need two ways clear.
John Zulaufd13b38e2021-03-05 08:17:38 -0700588 for (auto *bound_image : bound_images) {
589 if (bound_image) {
590 bound_image->aliasing_images.clear();
locke-lunargd556cc32019-09-17 01:21:23 -0600591 }
592 }
593}
594
locke-lunargd556cc32019-09-17 01:21:23 -0600595const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
596 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
597 auto it = phys_dev_map->find(phys);
598 if (it == phys_dev_map->end()) {
599 return nullptr;
600 }
601 return &it->second;
602}
603
604PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
605 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
606 auto it = phys_dev_map->find(phys);
607 if (it == phys_dev_map->end()) {
608 return nullptr;
609 }
610 return &it->second;
611}
612
// Convenience accessors for the physical device this logical device was created from.
PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
615
616// Return ptr to memory binding for given handle of specified type
617template <typename State, typename Result>
618static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
619 switch (typed_handle.type) {
620 case kVulkanObjectTypeImage:
621 return state->GetImageState(typed_handle.Cast<VkImage>());
622 case kVulkanObjectTypeBuffer:
623 return state->GetBufferState(typed_handle.Cast<VkBuffer>());
624 case kVulkanObjectTypeAccelerationStructureNV:
sourav parmarcd5fb182020-07-17 12:58:44 -0700625 return state->GetAccelerationStructureStateNV(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -0600626 default:
627 break;
628 }
629 return nullptr;
630}
631
// Const wrapper: resolve a typed handle to its BINDABLE state (image/buffer/NV AS).
const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}
635
// Non-const wrapper: resolve a typed handle to its BINDABLE state (image/buffer/NV AS).
BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}
639
// Called at draw/dispatch record time: propagate the currently-bound descriptor
// sets' resource usage into the command buffer state so later validation can see
// it. Caches per-set "validated" markers so unchanged sets can be skipped on
// subsequent draws (important for bindless-style workloads with huge sets).
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
                                             const char *function) {
    const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
    auto &state = cb_state->lastBound[lv_bind_point];
    PIPELINE_STATE *pipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pipe->active_slots) {
            uint32_t set_index = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[set_index].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[set_index].validated_set != descriptor_set ||
                state.per_set[set_index].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[set_index].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[set_index].validated_set_binding_req_map.begin(),
                                              state.per_set[set_index].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    // (delta between the new request map and the last-validated one)
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[set_index].validated_set_binding_req_map.begin(),
                                        state.per_set[set_index].validated_set_binding_req_map.end(),
                                        layer_data::insert_iterator<BindingReqMap>(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, delta_reqs, function);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, binding_req_map, function);
                }

                // Refresh the cached "validated" markers used by the skip test above
                state.per_set[set_index].validated_set = descriptor_set;
                state.per_set[set_index].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[set_index].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[set_index].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[set_index].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[set_index].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    // Remember that this pipeline consumes vertex buffers (used by later checks)
    if (!pipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}
711
712// Remove set from setMap and delete the set
713void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
Jeff Bolz41a1ced2019-10-11 11:40:49 -0500714 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600715 descriptor_set->Destroy();
Jeff Bolz41a1ced2019-10-11 11:40:49 -0500716
locke-lunargd556cc32019-09-17 01:21:23 -0600717 setMap.erase(descriptor_set->GetSet());
718}
719
720// Free all DS Pools including their Sets & related sub-structs
721// NOTE : Calls to this function should be wrapped in mutex
722void ValidationStateTracker::DeleteDescriptorSetPools() {
723 for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
724 // Remove this pools' sets from setMap and delete them
John Zulauf79f06582021-02-27 18:38:39 -0700725 for (auto *ds : ii->second->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -0600726 FreeDescriptorSet(ds);
727 }
728 ii->second->sets.clear();
729 ii = descriptorPoolMap.erase(ii);
730 }
731}
732
733// For given object struct return a ptr of BASE_NODE type for its wrapping struct
734BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
Jeff Bolzadbfa852019-10-04 13:53:30 -0500735 if (object_struct.node) {
736#ifdef _DEBUG
737 // assert that lookup would find the same object
738 VulkanTypedHandle other = object_struct;
739 other.node = nullptr;
740 assert(object_struct.node == GetStateStructPtrFromObject(other));
741#endif
742 return object_struct.node;
743 }
locke-lunargd556cc32019-09-17 01:21:23 -0600744 BASE_NODE *base_ptr = nullptr;
745 switch (object_struct.type) {
746 case kVulkanObjectTypeDescriptorSet: {
747 base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
748 break;
749 }
750 case kVulkanObjectTypeSampler: {
751 base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
752 break;
753 }
754 case kVulkanObjectTypeQueryPool: {
755 base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
756 break;
757 }
758 case kVulkanObjectTypePipeline: {
759 base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
760 break;
761 }
762 case kVulkanObjectTypeBuffer: {
763 base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
764 break;
765 }
766 case kVulkanObjectTypeBufferView: {
767 base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
768 break;
769 }
770 case kVulkanObjectTypeImage: {
771 base_ptr = GetImageState(object_struct.Cast<VkImage>());
772 break;
773 }
774 case kVulkanObjectTypeImageView: {
775 base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
776 break;
777 }
778 case kVulkanObjectTypeEvent: {
779 base_ptr = GetEventState(object_struct.Cast<VkEvent>());
780 break;
781 }
782 case kVulkanObjectTypeDescriptorPool: {
783 base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
784 break;
785 }
786 case kVulkanObjectTypeCommandPool: {
787 base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
788 break;
789 }
790 case kVulkanObjectTypeFramebuffer: {
791 base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
792 break;
793 }
794 case kVulkanObjectTypeRenderPass: {
795 base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
796 break;
797 }
798 case kVulkanObjectTypeDeviceMemory: {
799 base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
800 break;
801 }
802 case kVulkanObjectTypeAccelerationStructureNV: {
sourav parmarcd5fb182020-07-17 12:58:44 -0700803 base_ptr = GetAccelerationStructureStateNV(object_struct.Cast<VkAccelerationStructureNV>());
804 break;
805 }
806 case kVulkanObjectTypeAccelerationStructureKHR: {
807 base_ptr = GetAccelerationStructureStateKHR(object_struct.Cast<VkAccelerationStructureKHR>());
locke-lunargd556cc32019-09-17 01:21:23 -0600808 break;
809 }
Jeff Bolzadbfa852019-10-04 13:53:30 -0500810 case kVulkanObjectTypeUnknown:
811 // This can happen if an element of the object_bindings vector has been
812 // zeroed out, after an object is destroyed.
813 break;
locke-lunargd556cc32019-09-17 01:21:23 -0600814 default:
815 // TODO : Any other objects to be handled here?
816 assert(0);
817 break;
818 }
819 return base_ptr;
820}
821
sfricke-samsungbf1a2ed2020-06-14 23:31:00 -0700822// Gets union of all features defined by Potential Format Features
823// except, does not handle the external format case for AHB as that only can be used for sampled images
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700824VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
825 VkFormatFeatureFlags format_features = 0;
826
827 if (format != VK_FORMAT_UNDEFINED) {
828 VkFormatProperties format_properties;
829 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
830 format_features |= format_properties.linearTilingFeatures;
831 format_features |= format_properties.optimalTilingFeatures;
832 if (device_extensions.vk_ext_image_drm_format_modifier) {
833 // VK_KHR_get_physical_device_properties2 is required in this case
834 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
835 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
836 nullptr};
837 format_properties_2.pNext = (void *)&drm_properties_list;
Marc Alcala Prieto773871c2021-02-04 19:24:43 +0100838
839 // First call is to get the number of modifiers compatible with the queried format
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700840 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
Marc Alcala Prieto773871c2021-02-04 19:24:43 +0100841
842 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
843 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
844 drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
845
846 // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
847 // compatible with the queried format
848 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
849
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700850 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
851 format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
852 }
853 }
854 }
855
856 return format_features;
857}
858
locke-lunargd556cc32019-09-17 01:21:23 -0600859// Reset the command buffer state
860// Maintain the createInfo and set state to CB_NEW, but clear all other state
861void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700862 CMD_BUFFER_STATE *cb_state = GetCBState(cb);
863 if (cb_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600864 cb_state->Reset();
locke-lunargd556cc32019-09-17 01:21:23 -0600865 // Clean up the label data
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -0600866 ResetCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
locke-lunargd556cc32019-09-17 01:21:23 -0600867 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600868
locke-lunargd556cc32019-09-17 01:21:23 -0600869 if (command_buffer_reset_callback) {
870 (*command_buffer_reset_callback)(cb);
871 }
872}
873
874void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
875 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
876 VkResult result) {
877 if (VK_SUCCESS != result) return;
878
Locke Linf3873542021-04-26 11:25:10 -0600879 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
880 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
881 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
882
locke-lunargd556cc32019-09-17 01:21:23 -0600883 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
884 if (nullptr == enabled_features_found) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700885 const auto *features2 = LvlFindInChain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600886 if (features2) {
887 enabled_features_found = &(features2->features);
Locke Linf3873542021-04-26 11:25:10 -0600888
889 const auto *provoking_vertex_features = lvl_find_in_chain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(features2->pNext);
890 if (provoking_vertex_features) {
891 state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
892 }
locke-lunargd556cc32019-09-17 01:21:23 -0600893 }
894 }
895
locke-lunargd556cc32019-09-17 01:21:23 -0600896 if (nullptr == enabled_features_found) {
897 state_tracker->enabled_features.core = {};
898 } else {
899 state_tracker->enabled_features.core = *enabled_features_found;
900 }
901
902 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
903 // previously set them through an explicit API call.
904 uint32_t count;
905 auto pd_state = GetPhysicalDeviceState(gpu);
906 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
907 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
908 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
909 // Save local link to this device's physical device state
910 state_tracker->physical_device_state = pd_state;
911
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700912 const auto *vulkan_12_features = LvlFindInChain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700913 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700914 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700915 } else {
sfricke-samsung27c70722020-05-02 08:42:39 -0700916 // Set Extension Feature Aliases to false as there is no struct to check
917 state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
918 state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
919 state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
920 state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
921 state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
922 state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
sfricke-samsunga4143ac2020-12-18 00:00:53 -0800923 state_tracker->enabled_features.core12.subgroupBroadcastDynamicId = VK_FALSE;
sfricke-samsung27c70722020-05-02 08:42:39 -0700924
925 // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700926
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700927 const auto *eight_bit_storage_features = LvlFindInChain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700928 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700929 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
930 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
931 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
932 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700933 }
934
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700935 const auto *float16_int8_features = LvlFindInChain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700936 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700937 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
938 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700939 }
940
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700941 const auto *descriptor_indexing_features = LvlFindInChain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700942 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700943 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
944 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
945 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
946 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
947 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
948 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
949 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
950 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
951 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
952 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
953 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
954 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
955 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
956 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
957 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
958 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
959 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
960 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
961 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
962 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
963 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
964 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
965 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
966 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
967 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
968 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
969 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
970 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
971 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
972 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
973 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
974 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
975 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
976 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
977 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
978 descriptor_indexing_features->descriptorBindingPartiallyBound;
979 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
980 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
981 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700982 }
983
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700984 const auto *scalar_block_layout_features = LvlFindInChain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700985 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700986 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700987 }
988
989 const auto *imageless_framebuffer_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700990 LvlFindInChain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700991 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700992 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700993 }
994
995 const auto *uniform_buffer_standard_layout_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700996 LvlFindInChain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700997 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700998 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
999 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001000 }
1001
1002 const auto *subgroup_extended_types_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001003 LvlFindInChain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001004 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001005 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1006 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001007 }
1008
1009 const auto *separate_depth_stencil_layouts_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001010 LvlFindInChain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001011 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001012 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1013 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001014 }
1015
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001016 const auto *host_query_reset_features = LvlFindInChain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001017 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001018 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001019 }
1020
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001021 const auto *timeline_semaphore_features = LvlFindInChain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001022 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001023 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001024 }
1025
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001026 const auto *buffer_device_address = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001027 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001028 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1029 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1030 buffer_device_address->bufferDeviceAddressCaptureReplay;
1031 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1032 buffer_device_address->bufferDeviceAddressMultiDevice;
1033 }
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001034
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001035 const auto *atomic_int64_features = LvlFindInChain<VkPhysicalDeviceShaderAtomicInt64Features>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001036 if (atomic_int64_features) {
1037 state_tracker->enabled_features.core12.shaderBufferInt64Atomics = atomic_int64_features->shaderBufferInt64Atomics;
1038 state_tracker->enabled_features.core12.shaderSharedInt64Atomics = atomic_int64_features->shaderSharedInt64Atomics;
1039 }
1040
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001041 const auto *memory_model_features = LvlFindInChain<VkPhysicalDeviceVulkanMemoryModelFeatures>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001042 if (memory_model_features) {
1043 state_tracker->enabled_features.core12.vulkanMemoryModel = memory_model_features->vulkanMemoryModel;
1044 state_tracker->enabled_features.core12.vulkanMemoryModelDeviceScope =
1045 memory_model_features->vulkanMemoryModelDeviceScope;
1046 state_tracker->enabled_features.core12.vulkanMemoryModelAvailabilityVisibilityChains =
1047 memory_model_features->vulkanMemoryModelAvailabilityVisibilityChains;
1048 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001049 }
1050
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001051 const auto *vulkan_11_features = LvlFindInChain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001052 if (vulkan_11_features) {
1053 state_tracker->enabled_features.core11 = *vulkan_11_features;
1054 } else {
1055 // These structs are only allowed in pNext chain if there is no kPhysicalDeviceVulkan11Features
1056
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001057 const auto *sixteen_bit_storage_features = LvlFindInChain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001058 if (sixteen_bit_storage_features) {
1059 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1060 sixteen_bit_storage_features->storageBuffer16BitAccess;
1061 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1062 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1063 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1064 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1065 }
1066
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001067 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001068 if (multiview_features) {
1069 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1070 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1071 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1072 }
1073
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001074 const auto *variable_pointers_features = LvlFindInChain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001075 if (variable_pointers_features) {
1076 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1077 variable_pointers_features->variablePointersStorageBuffer;
1078 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1079 }
1080
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001081 const auto *protected_memory_features = LvlFindInChain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001082 if (protected_memory_features) {
1083 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1084 }
1085
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001086 const auto *ycbcr_conversion_features = LvlFindInChain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001087 if (ycbcr_conversion_features) {
1088 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1089 }
1090
1091 const auto *shader_draw_parameters_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001092 LvlFindInChain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001093 if (shader_draw_parameters_features) {
1094 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001095 }
1096 }
1097
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001098 const auto *device_group_ci = LvlFindInChain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001099 if (device_group_ci) {
1100 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1101 state_tracker->device_group_create_info = *device_group_ci;
1102 } else {
1103 state_tracker->physical_device_count = 1;
1104 }
locke-lunargd556cc32019-09-17 01:21:23 -06001105
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001106 const auto *exclusive_scissor_features = LvlFindInChain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001107 if (exclusive_scissor_features) {
1108 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1109 }
1110
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001111 const auto *shading_rate_image_features = LvlFindInChain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001112 if (shading_rate_image_features) {
1113 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1114 }
1115
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001116 const auto *mesh_shader_features = LvlFindInChain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001117 if (mesh_shader_features) {
1118 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1119 }
1120
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001121 const auto *inline_uniform_block_features = LvlFindInChain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001122 if (inline_uniform_block_features) {
1123 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1124 }
1125
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001126 const auto *transform_feedback_features = LvlFindInChain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001127 if (transform_feedback_features) {
1128 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1129 }
1130
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001131 const auto *vtx_attrib_div_features = LvlFindInChain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001132 if (vtx_attrib_div_features) {
1133 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1134 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001135
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001136 const auto *buffer_device_address_ext = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001137 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001138 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001139 }
1140
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001141 const auto *cooperative_matrix_features = LvlFindInChain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001142 if (cooperative_matrix_features) {
1143 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1144 }
1145
locke-lunargd556cc32019-09-17 01:21:23 -06001146 const auto *compute_shader_derivatives_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001147 LvlFindInChain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001148 if (compute_shader_derivatives_features) {
1149 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1150 }
1151
1152 const auto *fragment_shader_barycentric_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001153 LvlFindInChain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001154 if (fragment_shader_barycentric_features) {
1155 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1156 }
1157
1158 const auto *shader_image_footprint_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001159 LvlFindInChain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001160 if (shader_image_footprint_features) {
1161 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1162 }
1163
1164 const auto *fragment_shader_interlock_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001165 LvlFindInChain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001166 if (fragment_shader_interlock_features) {
1167 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1168 }
1169
1170 const auto *demote_to_helper_invocation_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001171 LvlFindInChain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001172 if (demote_to_helper_invocation_features) {
1173 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1174 }
1175
1176 const auto *texel_buffer_alignment_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001177 LvlFindInChain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001178 if (texel_buffer_alignment_features) {
1179 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1180 }
1181
locke-lunargd556cc32019-09-17 01:21:23 -06001182 const auto *pipeline_exe_props_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001183 LvlFindInChain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001184 if (pipeline_exe_props_features) {
1185 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1186 }
1187
Jeff Bolz82f854d2019-09-17 14:56:47 -05001188 const auto *dedicated_allocation_image_aliasing_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001189 LvlFindInChain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
Jeff Bolz82f854d2019-09-17 14:56:47 -05001190 if (dedicated_allocation_image_aliasing_features) {
1191 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1192 *dedicated_allocation_image_aliasing_features;
1193 }
1194
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001195 const auto *performance_query_features = LvlFindInChain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001196 if (performance_query_features) {
1197 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1198 }
1199
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001200 const auto *device_coherent_memory_features = LvlFindInChain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
Tobias Hector782bcde2019-11-28 16:19:42 +00001201 if (device_coherent_memory_features) {
1202 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1203 }
1204
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001205 const auto *ycbcr_image_array_features = LvlFindInChain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungcead0802020-01-30 22:20:10 -08001206 if (ycbcr_image_array_features) {
1207 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1208 }
1209
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001210 const auto *ray_query_features = LvlFindInChain<VkPhysicalDeviceRayQueryFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001211 if (ray_query_features) {
1212 state_tracker->enabled_features.ray_query_features = *ray_query_features;
1213 }
1214
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001215 const auto *ray_tracing_pipeline_features = LvlFindInChain<VkPhysicalDeviceRayTracingPipelineFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001216 if (ray_tracing_pipeline_features) {
1217 state_tracker->enabled_features.ray_tracing_pipeline_features = *ray_tracing_pipeline_features;
1218 }
1219
1220 const auto *ray_tracing_acceleration_structure_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001221 LvlFindInChain<VkPhysicalDeviceAccelerationStructureFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001222 if (ray_tracing_acceleration_structure_features) {
1223 state_tracker->enabled_features.ray_tracing_acceleration_structure_features = *ray_tracing_acceleration_structure_features;
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001224 }
1225
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001226 const auto *robustness2_features = LvlFindInChain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz165818a2020-05-08 11:19:03 -05001227 if (robustness2_features) {
1228 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1229 }
1230
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001231 const auto *fragment_density_map_features = LvlFindInChain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001232 if (fragment_density_map_features) {
1233 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1234 }
1235
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001236 const auto *fragment_density_map_features2 = LvlFindInChain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001237 if (fragment_density_map_features2) {
1238 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1239 }
1240
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001241 const auto *astc_decode_features = LvlFindInChain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001242 if (astc_decode_features) {
1243 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1244 }
1245
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001246 const auto *custom_border_color_features = LvlFindInChain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
Tony-LunarG7337b312020-04-15 16:40:25 -06001247 if (custom_border_color_features) {
1248 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1249 }
1250
sfricke-samsungfd661d62020-05-16 00:57:27 -07001251 const auto *pipeline_creation_cache_control_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001252 LvlFindInChain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungfd661d62020-05-16 00:57:27 -07001253 if (pipeline_creation_cache_control_features) {
1254 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1255 }
1256
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001257 const auto *fragment_shading_rate_features = LvlFindInChain<VkPhysicalDeviceFragmentShadingRateFeaturesKHR>(pCreateInfo->pNext);
Tobias Hector6663c9b2020-11-05 10:18:02 +00001258 if (fragment_shading_rate_features) {
1259 state_tracker->enabled_features.fragment_shading_rate_features = *fragment_shading_rate_features;
1260 }
1261
Piers Daniell39842ee2020-07-10 16:42:33 -06001262 const auto *extended_dynamic_state_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001263 LvlFindInChain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
Piers Daniell39842ee2020-07-10 16:42:33 -06001264 if (extended_dynamic_state_features) {
1265 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1266 }
1267
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -07001268 const auto *extended_dynamic_state2_features =
1269 LvlFindInChain<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT>(pCreateInfo->pNext);
1270 if (extended_dynamic_state2_features) {
1271 state_tracker->enabled_features.extended_dynamic_state2_features = *extended_dynamic_state2_features;
1272 }
1273
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001274 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001275 if (multiview_features) {
1276 state_tracker->enabled_features.multiview_features = *multiview_features;
1277 }
1278
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001279 const auto *portability_features = LvlFindInChain<VkPhysicalDevicePortabilitySubsetFeaturesKHR>(pCreateInfo->pNext);
Nathaniel Cesariob3f2d702020-11-09 09:20:49 -07001280 if (portability_features) {
1281 state_tracker->enabled_features.portability_subset_features = *portability_features;
1282 }
1283
sfricke-samsung0065ce02020-12-03 22:46:37 -08001284 const auto *shader_integer_functions2_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001285 LvlFindInChain<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001286 if (shader_integer_functions2_features) {
1287 state_tracker->enabled_features.shader_integer_functions2_features = *shader_integer_functions2_features;
1288 }
1289
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001290 const auto *shader_sm_builtins_feature = LvlFindInChain<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001291 if (shader_sm_builtins_feature) {
1292 state_tracker->enabled_features.shader_sm_builtins_feature = *shader_sm_builtins_feature;
1293 }
1294
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001295 const auto *shader_atomic_float_feature = LvlFindInChain<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001296 if (shader_atomic_float_feature) {
1297 state_tracker->enabled_features.shader_atomic_float_feature = *shader_atomic_float_feature;
1298 }
1299
1300 const auto *shader_image_atomic_int64_feature =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001301 LvlFindInChain<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001302 if (shader_image_atomic_int64_feature) {
1303 state_tracker->enabled_features.shader_image_atomic_int64_feature = *shader_image_atomic_int64_feature;
1304 }
1305
sfricke-samsung486a51e2021-01-02 00:10:15 -08001306 const auto *shader_clock_feature = LvlFindInChain<VkPhysicalDeviceShaderClockFeaturesKHR>(pCreateInfo->pNext);
1307 if (shader_clock_feature) {
1308 state_tracker->enabled_features.shader_clock_feature = *shader_clock_feature;
1309 }
1310
Jeremy Gebben5f585ae2021-02-02 09:03:06 -07001311 const auto *conditional_rendering_features =
1312 LvlFindInChain<VkPhysicalDeviceConditionalRenderingFeaturesEXT>(pCreateInfo->pNext);
1313 if (conditional_rendering_features) {
1314 state_tracker->enabled_features.conditional_rendering = *conditional_rendering_features;
1315 }
1316
Shannon McPhersondb287d42021-02-02 15:27:32 -07001317 const auto *workgroup_memory_explicit_layout_features =
1318 LvlFindInChain<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>(pCreateInfo->pNext);
1319 if (workgroup_memory_explicit_layout_features) {
1320 state_tracker->enabled_features.workgroup_memory_explicit_layout_features = *workgroup_memory_explicit_layout_features;
1321 }
1322
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001323 const auto *synchronization2_features =
1324 LvlFindInChain<VkPhysicalDeviceSynchronization2FeaturesKHR>(pCreateInfo->pNext);
1325 if (synchronization2_features) {
1326 state_tracker->enabled_features.synchronization2_features = *synchronization2_features;
1327 }
1328
Locke Linf3873542021-04-26 11:25:10 -06001329 const auto *provoking_vertex_features = lvl_find_in_chain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(pCreateInfo->pNext);
1330 if (provoking_vertex_features) {
1331 state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
1332 }
1333
Piers Daniellcb6d8032021-04-19 18:51:26 -06001334 const auto *vertex_input_dynamic_state_features =
1335 LvlFindInChain<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1336 if (vertex_input_dynamic_state_features) {
1337 state_tracker->enabled_features.vertex_input_dynamic_state_features = *vertex_input_dynamic_state_features;
1338 }
1339
David Zhao Akeley44139b12021-04-26 16:16:13 -07001340 const auto *inherited_viewport_scissor_features =
1341 LvlFindInChain<VkPhysicalDeviceInheritedViewportScissorFeaturesNV>(pCreateInfo->pNext);
1342 if (inherited_viewport_scissor_features) {
1343 state_tracker->enabled_features.inherited_viewport_scissor_features = *inherited_viewport_scissor_features;
1344 }
1345
Tony-LunarG4490de42021-06-21 15:49:19 -06001346 const auto *multi_draw_features = LvlFindInChain<VkPhysicalDeviceMultiDrawFeaturesEXT>(pCreateInfo->pNext);
1347 if (multi_draw_features) {
1348 state_tracker->enabled_features.multi_draw_features = *multi_draw_features;
1349 }
1350
locke-lunargd556cc32019-09-17 01:21:23 -06001351 // Store physical device properties and physical device mem limits into CoreChecks structs
1352 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1353 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001354 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1355 &state_tracker->phys_dev_props_core11);
1356 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1357 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001358
1359 const auto &dev_ext = state_tracker->device_extensions;
1360 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1361
1362 if (dev_ext.vk_khr_push_descriptor) {
1363 // Get the needed push_descriptor limits
1364 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1365 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1366 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1367 }
1368
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001369 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001370 VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing_prop;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001371 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1372 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1373 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1374 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1375 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1376 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1377 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1378 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1379 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1380 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1381 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1382 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1383 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1384 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1385 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1386 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1387 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1388 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1389 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1390 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1391 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1392 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1393 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1394 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1395 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1396 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1397 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1398 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1399 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1400 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1401 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1402 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1403 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1404 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1405 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1406 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1407 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1408 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1409 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1410 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
1411 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
1412 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
1413 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
1414 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
1415 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
1416 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
1417 }
1418
locke-lunargd556cc32019-09-17 01:21:23 -06001419 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1420 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1421 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1422 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001423
1424 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001425 VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001426 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
1427 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
1428 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
1429 depth_stencil_resolve_props.supportedStencilResolveModes;
1430 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
1431 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
1432 }
1433
locke-lunargd556cc32019-09-17 01:21:23 -06001434 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001435 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
sourav parmarcd5fb182020-07-17 12:58:44 -07001436 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing_pipeline, &phys_dev_props->ray_tracing_propsKHR);
1437 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_acceleration_structure, &phys_dev_props->acc_structure_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001438 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1439 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001440 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001441 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07001442 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06001443 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001444 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_multiview, &phys_dev_props->multiview_props);
Nathaniel Cesario3291c912020-11-17 16:54:41 -07001445 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_portability_subset, &phys_dev_props->portability_props);
Locke Lin016d8482021-05-27 12:11:31 -06001446 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_provoking_vertex, &phys_dev_props->provoking_vertex_props);
Tony-LunarG4490de42021-06-21 15:49:19 -06001447 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_multi_draw, &phys_dev_props->multi_draw_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001448
1449 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001450 VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001451 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
1452 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
1453 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
1454 }
1455
1456 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001457 VkPhysicalDeviceFloatControlsProperties float_controls_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001458 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
1459 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
1460 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
1461 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
1462 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
1463 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
1464 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
1465 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
1466 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
1467 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
1468 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
1469 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
1470 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
1471 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
1472 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
1473 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
1474 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
1475 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
1476 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
1477 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
1478 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
1479 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001480
locke-lunargd556cc32019-09-17 01:21:23 -06001481 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1482 // Get the needed cooperative_matrix properties
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001483 auto cooperative_matrix_props = LvlInitStruct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1484 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&cooperative_matrix_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001485 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1486 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1487
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001488 uint32_t num_cooperative_matrix_properties = 0;
1489 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties, NULL);
1490 state_tracker->cooperative_matrix_properties.resize(num_cooperative_matrix_properties,
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001491 LvlInitStruct<VkCooperativeMatrixPropertiesNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06001492
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001493 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties,
locke-lunargd556cc32019-09-17 01:21:23 -06001494 state_tracker->cooperative_matrix_properties.data());
1495 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001496 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06001497 // Get the needed subgroup limits
Locke Lin016d8482021-05-27 12:11:31 -06001498 auto subgroup_prop = LvlInitStruct<VkPhysicalDeviceSubgroupProperties>();
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001499 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&subgroup_prop);
locke-lunargd556cc32019-09-17 01:21:23 -06001500 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1501
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001502 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
1503 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
1504 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
1505 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06001506 }
1507
Tobias Hector6663c9b2020-11-05 10:18:02 +00001508 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_fragment_shading_rate, &phys_dev_props->fragment_shading_rate_props);
1509
locke-lunargd556cc32019-09-17 01:21:23 -06001510 // Store queue family data
1511 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1512 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001513 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
sfricke-samsungb585ec12021-05-06 03:10:13 -07001514 state_tracker->queue_family_index_set.insert(queue_create_info.queueFamilyIndex);
1515 state_tracker->device_queue_info_list.push_back(
1516 {i, queue_create_info.queueFamilyIndex, queue_create_info.flags, queue_create_info.queueCount});
locke-lunargd556cc32019-09-17 01:21:23 -06001517 }
1518 }
1519}
1520
// Tear down all device-level tracked state before the device handle disappears.
// Ordering matters: command buffers are reset first so their object_bindings are
// unlinked before the objects they reference are destroyed.
void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &command_buffer : commandBufferMap) {
        ResetCommandBufferState(command_buffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}
1544
locke-lunargd556cc32019-09-17 01:21:23 -06001545// Track which resources are in-flight by atomically incrementing their "in_use" count
1546void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1547 cb_node->submitCount++;
locke-lunargd556cc32019-09-17 01:21:23 -06001548
locke-lunargd556cc32019-09-17 01:21:23 -06001549 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
1550 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
1551 // should then be flagged prior to calling this function
1552 for (auto event : cb_node->writeEventsBeforeWait) {
1553 auto event_state = GetEventState(event);
1554 if (event_state) event_state->write_in_use++;
1555 }
1556}
1557
// Retire (complete) all submissions on pQueue up to sequence number `seq`.
// Releases in-use counts taken at submit time, publishes timeline semaphore
// payloads and query results, marks the submission's fence retired, and then
// recursively rolls forward any other queues / timeline semaphores this queue's
// submissions were observed to wait on.
void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    // Highest wait seen per other queue / per timeline semaphore; processed after
    // this queue has been fully rolled forward.
    layer_data::unordered_map<VkQueue, uint64_t> other_queue_seqs;
    layer_data::unordered_map<VkSemaphore, uint64_t> timeline_semaphore_counters;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto semaphore_state = GetSemaphoreState(wait.semaphore);
            if (semaphore_state) {
                // Balance the BeginUse() recorded when the wait was submitted.
                semaphore_state->EndUse();
            }
            if (wait.type == VK_SEMAPHORE_TYPE_TIMELINE) {
                // Remember the largest payload waited on per timeline semaphore.
                auto &last_counter = timeline_semaphore_counters[wait.semaphore];
                last_counter = std::max(last_counter, wait.payload);
            } else {
                // Binary wait: the signaling queue must have progressed to wait.seq.
                auto &last_seq = other_queue_seqs[wait.queue];
                last_seq = std::max(last_seq, wait.seq);
            }
        }

        for (auto &signal : submission.signalSemaphores) {
            auto semaphore_state = GetSemaphoreState(signal.semaphore);
            if (semaphore_state) {
                semaphore_state->EndUse();
                // Timeline payloads only ever advance; ignore stale signals.
                if (semaphore_state->type == VK_SEMAPHORE_TYPE_TIMELINE && semaphore_state->payload < signal.payload) {
                    semaphore_state->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto semaphore_state = GetSemaphoreState(semaphore);
            if (semaphore_state) {
                semaphore_state->EndUse();
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto event_node = eventMap.find(event);
                if (event_node != eventMap.end()) {
                    event_node->second->write_in_use--;
                }
            }
            // Replay the recorded query updates to compute final query states,
            // then publish ENDED queries as AVAILABLE now that the work retired.
            QueryMap local_query_to_state_map;
            VkQueryPool first_pool = VK_NULL_HANDLE;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
            }

            for (const auto &query_state_pair : local_query_to_state_map) {
                if (query_state_pair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[query_state_pair.first] = QUERYSTATE_AVAILABLE;
                }
            }
            // Only primaries took a BeginUse() at submit time (secondaries are
            // covered through their primary); balance it here.
            if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
                cb_node->EndUse();
            }
        }

        auto fence_state = GetFenceState(submission.fence);
        if (fence_state && fence_state->scope == kSyncScopeInternal) {
            fence_state->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (const auto &qs : other_queue_seqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
    for (const auto &sc : timeline_semaphore_counters) {
        RetireTimelineSemaphore(sc.first, sc.second);
    }
}
1642
1643// Submit a fence to a queue, delimiting previous fences and previous untracked
1644// work by it.
1645static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1646 pFence->state = FENCE_INFLIGHT;
1647 pFence->signaler.first = pQueue->queue;
1648 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1649}
1650
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001651uint64_t ValidationStateTracker::RecordSubmitFence(QUEUE_STATE *queue_state, VkFence fence, uint32_t submit_count) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001652 auto fence_state = GetFenceState(fence);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001653 uint64_t early_retire_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001654 if (fence_state) {
1655 if (fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06001656 // Mark fence in use
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001657 SubmitFence(queue_state, fence_state, std::max(1u, submit_count));
1658 if (!submit_count) {
locke-lunargd556cc32019-09-17 01:21:23 -06001659 // If no submissions, but just dropping a fence on the end of the queue,
1660 // record an empty submission with just the fence, so we can determine
1661 // its completion.
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001662 CB_SUBMISSION submission;
1663 submission.fence = fence;
1664 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001665 }
1666 } else {
1667 // Retire work up until this fence early, we will not see the wait that corresponds to this signal
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001668 early_retire_seq = queue_state->seq + queue_state->submissions.size();
locke-lunargd556cc32019-09-17 01:21:23 -06001669 }
1670 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001671 return early_retire_seq;
1672}
1673
// Record command_buffer (and any linked secondaries) into `submission`:
// collect the handles, bump in-flight counts, and apply the deferred
// query/event state updates that were captured at record time.
void ValidationStateTracker::RecordSubmitCommandBuffer(CB_SUBMISSION &submission, VkCommandBuffer command_buffer) {
    auto cb_node = GetCBState(command_buffer);
    if (cb_node) {
        submission.cbs.push_back(command_buffer);
        // Secondaries execute as part of the primary; track them in the same submission.
        for (auto *secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
            submission.cbs.push_back(secondary_cmd_buffer->commandBuffer());
            IncrementResources(secondary_cmd_buffer);
        }
        IncrementResources(cb_node);
        // increment use count for all bound objects including secondary cbs
        cb_node->BeginUse();

        // Replay the recorded query updates with validation disabled, then merge
        // the resulting states into the global query state map.
        VkQueryPool first_pool = VK_NULL_HANDLE;
        EventToStageMap local_event_to_stage_map;
        QueryMap local_query_to_state_map;
        for (auto &function : cb_node->queryUpdates) {
            function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
        }

        for (const auto &query_state_pair : local_query_to_state_map) {
            queryToStateMap[query_state_pair.first] = query_state_pair.second;
        }

        // Same pattern for event stage-mask updates recorded in the command buffer.
        for (const auto &function : cb_node->eventUpdates) {
            function(nullptr, /*do_validate*/ false, &local_event_to_stage_map);
        }

        for (const auto &eventStagePair : local_event_to_stage_map) {
            eventMap[eventStagePair.first]->stageMask = eventStagePair.second;
        }
    }
}
1706
// Record one wait-semaphore entry for a submission on `queue` whose completion
// sequence will be `next_seq`. `value` is the timeline payload waited on
// (ignored for binary semaphores). Takes a BeginUse() reference for every wait
// actually recorded; RetireWorkOnQueue releases it.
void ValidationStateTracker::RecordSubmitWaitSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
                                                       uint64_t value, uint64_t next_seq) {
    auto semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state) {
        if (semaphore_state->scope == kSyncScopeInternal) {
            SEMAPHORE_WAIT wait;
            wait.semaphore = semaphore;
            wait.type = semaphore_state->type;
            if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
                // Only record the wait if a prior signal was seen; either way the
                // binary semaphore is consumed (unsignaled) by this wait.
                if (semaphore_state->signaler.first != VK_NULL_HANDLE) {
                    wait.queue = semaphore_state->signaler.first;
                    wait.seq = semaphore_state->signaler.second;
                    submission.waitSemaphores.emplace_back(std::move(wait));
                    semaphore_state->BeginUse();
                }
                semaphore_state->signaler.first = VK_NULL_HANDLE;
                semaphore_state->signaled = false;
            } else if (semaphore_state->payload < value) {
                // Timeline wait on a payload not yet reached: attribute it to this
                // queue/submission so retirement can resolve it.
                wait.queue = queue;
                wait.seq = next_seq;
                wait.payload = value;
                submission.waitSemaphores.emplace_back(std::move(wait));
                semaphore_state->BeginUse();
            }
        } else {
            // External semaphore: we cannot track the signal, just note the use.
            submission.externalSemaphores.push_back(semaphore);
            semaphore_state->BeginUse();
            // A temporary import reverts to internal scope once waited on.
            if (semaphore_state->scope == kSyncScopeExternalTemporary) {
                semaphore_state->scope = kSyncScopeInternal;
            }
        }
    }
}
1740
// Record one signal-semaphore entry for a submission on `queue` whose completion
// sequence will be `next_seq`. `value` is the timeline payload to signal
// (ignored for binary semaphores). Returns true when the caller must retire
// work up to this submission early (external-scope semaphore whose matching
// wait will never be observed).
bool ValidationStateTracker::RecordSubmitSignalSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
                                                         uint64_t value, uint64_t next_seq) {
    bool retire_early = false;
    auto semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state) {
        if (semaphore_state->scope == kSyncScopeInternal) {
            SEMAPHORE_SIGNAL signal;
            signal.semaphore = semaphore;
            signal.seq = next_seq;
            if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
                // Binary: mark signaled now and remember which queue/seq signals it.
                semaphore_state->signaler.first = queue;
                semaphore_state->signaler.second = next_seq;
                semaphore_state->signaled = true;
            } else {
                // Timeline: payload is published at retirement, not here.
                signal.payload = value;
            }
            semaphore_state->BeginUse();  // released by RetireWorkOnQueue
            submission.signalSemaphores.emplace_back(std::move(signal));
        } else {
            // Retire work up until this submit early, we will not see the wait that corresponds to this signal
            retire_early = true;
        }
    }
    return retire_early;
}
1766
// Record state for a successful vkQueueSubmit: one CB_SUBMISSION per VkSubmitInfo,
// capturing wait/signal semaphores (with timeline payloads from the pNext chain),
// command buffers, and the fence on the final batch. Retires work immediately for
// any external-scope fence/semaphore whose completion we cannot otherwise observe.
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    auto queue_state = GetQueueState(queue);

    uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        CB_SUBMISSION submission;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        // Sequence number this submission will complete at.
        const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
        // Timeline payloads for legacy VkSubmitInfo live in a pNext struct.
        auto *timeline_semaphore_submit = LvlFindInChain<VkTimelineSemaphoreSubmitInfo>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            uint64_t value = 0;
            if (timeline_semaphore_submit && timeline_semaphore_submit->pWaitSemaphoreValues != nullptr &&
                (i < timeline_semaphore_submit->waitSemaphoreValueCount)) {
                value = timeline_semaphore_submit->pWaitSemaphoreValues[i];
            }
            RecordSubmitWaitSemaphore(submission, queue, submit->pWaitSemaphores[i], value, next_seq);
        }

        bool retire_early = false;
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            uint64_t value = 0;
            if (timeline_semaphore_submit && timeline_semaphore_submit->pSignalSemaphoreValues != nullptr &&
                (i < timeline_semaphore_submit->signalSemaphoreValueCount)) {
                value = timeline_semaphore_submit->pSignalSemaphoreValues[i];
            }
            retire_early |= RecordSubmitSignalSemaphore(submission, queue, submit->pSignalSemaphores[i], value, next_seq);
        }
        if (retire_early) {
            early_retire_seq = std::max(early_retire_seq, next_seq);
        }

        const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            RecordSubmitCommandBuffer(submission, submit->pCommandBuffers[i]);
        }
        // The fence only guards the last batch in the call.
        submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
        queue_state->submissions.emplace_back(std::move(submission));
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(queue_state, early_retire_seq);
    }
}
1816
1817void ValidationStateTracker::PostCallRecordQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
1818 VkFence fence, VkResult result) {
1819 if (result != VK_SUCCESS) return;
1820 auto queue_state = GetQueueState(queue);
1821
1822 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);
1823
1824 // Now process each individual submit
1825 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1826 CB_SUBMISSION submission;
1827 const VkSubmitInfo2KHR *submit = &pSubmits[submit_idx];
1828 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
1829 for (uint32_t i = 0; i < submit->waitSemaphoreInfoCount; ++i) {
1830 const auto &sem_info = submit->pWaitSemaphoreInfos[i];
1831 RecordSubmitWaitSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
1832 }
1833 bool retire_early = false;
1834 for (uint32_t i = 0; i < submit->signalSemaphoreInfoCount; ++i) {
1835 const auto &sem_info = submit->pSignalSemaphoreInfos[i];
1836 retire_early |= RecordSubmitSignalSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
1837 }
1838 if (retire_early) {
1839 early_retire_seq = std::max(early_retire_seq, next_seq);
1840 }
1841 const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
1842 submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;
1843
1844 for (uint32_t i = 0; i < submit->commandBufferInfoCount; i++) {
1845 RecordSubmitCommandBuffer(submission, submit->pCommandBufferInfos[i].commandBuffer);
1846 }
1847 submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
1848 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001849 }
1850
1851 if (early_retire_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001852 RetireWorkOnQueue(queue_state, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001853 }
1854}
1855
// Record state for a successful vkAllocateMemory: resolve the memory type/heap,
// assign a fake base address for binding-range tracking, capture any dedicated
// buffer/image binding from the pNext chain, and create the DEVICE_MEMORY_STATE.
void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS != result) {
        return;
    }
    const auto &memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    const auto &memory_heap = phys_dev_mem_props.memoryHeaps[memory_type.heapIndex];
    // Fake address space stands in for the real device addresses when tracking bindings.
    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);

    layer_data::optional<DedicatedBinding> dedicated_binding;

    auto dedicated = LvlFindInChain<VkMemoryDedicatedAllocateInfo>(pAllocateInfo->pNext);
    if (dedicated) {
        if (dedicated->buffer) {
            const auto *buffer_state = GetBufferState(dedicated->buffer);
            // A dedicated allocation must reference a known buffer; bail out of
            // tracking (after asserting in debug builds) if it does not.
            assert(buffer_state);
            if (!buffer_state) {
                return;
            }
            dedicated_binding.emplace(dedicated->buffer, buffer_state->createInfo);
        } else if (dedicated->image) {
            const auto *image_state = GetImageState(dedicated->image);
            assert(image_state);
            if (!image_state) {
                return;
            }
            dedicated_binding.emplace(dedicated->image, image_state->createInfo);
        }
    }
    memObjMap[*pMemory] = std::make_shared<DEVICE_MEMORY_STATE>(*pMemory, pAllocateInfo, fake_address, memory_type, memory_heap,
                                                                std::move(dedicated_binding));
    return;
}
1890
1891void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
1892 if (!mem) return;
1893 DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06001894 if (!mem_info) return;
locke-lunargd556cc32019-09-17 01:21:23 -06001895 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001896 mem_info->Destroy();
John Zulauf79952712020-04-07 11:25:54 -06001897 fake_memory.Free(mem_info->fake_base_address);
locke-lunargd556cc32019-09-17 01:21:23 -06001898 memObjMap.erase(mem);
1899}
1900
// Record state for a successful vkQueueBindSparse: apply every buffer / opaque
// image / image sparse binding to the tracked resources, then record each
// VkBindSparseInfo as a queue submission (wait/signal semaphores, fence on the
// last batch) exactly like a vkQueueSubmit.
void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    auto queue_state = GetQueueState(queue);

    uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, bindInfoCount);

    for (uint32_t bind_idx = 0; bind_idx < bindInfoCount; ++bind_idx) {
        const VkBindSparseInfo &bind_info = pBindInfo[bind_idx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bind_info.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pBufferBinds[j].pBinds[k];
                auto buffer_state = GetBufferState(bind_info.pBufferBinds[j].buffer);
                auto mem_state = GetDevMemShared(sparse_binding.memory);
                if (buffer_state && mem_state) {
                    buffer_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
                }
            }
        }
        for (uint32_t j = 0; j < bind_info.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pImageOpaqueBinds[j].pBinds[k];
                auto image_state = GetImageState(bind_info.pImageOpaqueBinds[j].image);
                auto mem_state = GetDevMemShared(sparse_binding.memory);
                if (image_state && mem_state) {
                    image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
                }
            }
        }
        for (uint32_t j = 0; j < bind_info.imageBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                auto image_state = GetImageState(bind_info.pImageBinds[j].image);
                auto mem_state = GetDevMemShared(sparse_binding.memory);
                if (image_state && mem_state) {
                    image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, size);
                }
            }
        }
        // Sparse binds carry no timeline payloads, hence value 0 for all semaphores.
        CB_SUBMISSION submission;
        const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
        for (uint32_t i = 0; i < bind_info.waitSemaphoreCount; ++i) {
            RecordSubmitWaitSemaphore(submission, queue, bind_info.pWaitSemaphores[i], 0, next_seq);
        }
        bool retire_early = false;
        for (uint32_t i = 0; i < bind_info.signalSemaphoreCount; ++i) {
            retire_early |= RecordSubmitSignalSemaphore(submission, queue, bind_info.pSignalSemaphores[i], 0, next_seq);
        }
        // Retire work up until this submit early, we will not see the wait that corresponds to this signal
        if (retire_early) {
            early_retire_seq = std::max(early_retire_seq, queue_state->seq + queue_state->submissions.size() + 1);
        }

        // The fence only guards the last batch in the call.
        submission.fence = bind_idx == (bindInfoCount - 1) ? fence : VK_NULL_HANDLE;
        queue_state->submissions.emplace_back(std::move(submission));
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(queue_state, early_retire_seq);
    }
}
1965
1966void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
1967 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
1968 VkResult result) {
1969 if (VK_SUCCESS != result) return;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001970 semaphoreMap[*pSemaphore] = std::make_shared<SEMAPHORE_STATE>(*pSemaphore, LvlFindInChain<VkSemaphoreTypeCreateInfo>(pCreateInfo->pNext));
locke-lunargd556cc32019-09-17 01:21:23 -06001971}
1972
Mike Schuchardt2df08912020-12-15 16:28:09 -08001973void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBits handle_type,
1974 VkSemaphoreImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06001975 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
1976 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001977 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06001978 sema_node->scope == kSyncScopeInternal) {
1979 sema_node->scope = kSyncScopeExternalTemporary;
1980 } else {
1981 sema_node->scope = kSyncScopeExternalPermanent;
1982 }
1983 }
1984}
1985
Mike Schuchardt2df08912020-12-15 16:28:09 -08001986void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo,
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001987 VkResult result) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001988 auto *semaphore_state = GetSemaphoreState(pSignalInfo->semaphore);
1989 semaphore_state->payload = pSignalInfo->value;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001990}
1991
locke-lunargd556cc32019-09-17 01:21:23 -06001992void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
1993 auto mem_info = GetDevMemState(mem);
1994 if (mem_info) {
1995 mem_info->mapped_range.offset = offset;
1996 mem_info->mapped_range.size = size;
1997 mem_info->p_driver_data = *ppData;
1998 }
1999}
2000
// Mark a fence as signaled and retire whatever work its signal proves complete.
void ValidationStateTracker::RetireFence(VkFence fence) {
    auto fence_state = GetFenceState(fence);
    // Externally-scoped fences are not retired here; their payload is owned elsewhere.
    if (fence_state && fence_state->scope == kSyncScopeInternal) {
        if (fence_state->signaler.first != VK_NULL_HANDLE) {
            // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
            RetireWorkOnQueue(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
        } else {
            // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
            // the fence as retired.
            fence_state->state = FENCE_RETIRED;
        }
    }
}
2014
2015void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2016 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2017 if (VK_SUCCESS != result) return;
2018
2019 // When we know that all fences are complete we can clean/remove their CBs
2020 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2021 for (uint32_t i = 0; i < fenceCount; i++) {
2022 RetireFence(pFences[i]);
2023 }
2024 }
2025 // NOTE : Alternate case not handled here is when some fences have completed. In
2026 // this case for app to guarantee which fences completed it will have to call
2027 // vkGetFenceStatus() at which point we'll clean/remove their CBs if complete.
2028}
2029
// Retire queue work whose completion is proven by this timeline semaphore having
// reached `until_payload`.
void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
    auto semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state) {
        // A timeline semaphore may be signaled from any queue; scan them all.
        for (auto &pair : queueMap) {
            QUEUE_STATE &queue_state = pair.second;
            uint64_t max_seq = 0;
            // Find the highest submission sequence on this queue that signals this
            // semaphore with a payload <= until_payload; everything up to that
            // submission is known complete.
            for (const auto &submission : queue_state.submissions) {
                for (const auto &signal_semaphore : submission.signalSemaphores) {
                    if (signal_semaphore.semaphore == semaphore && signal_semaphore.payload <= until_payload) {
                        if (signal_semaphore.seq > max_seq) {
                            max_seq = signal_semaphore.seq;
                        }
                    }
                }
            }
            if (max_seq) {
                RetireWorkOnQueue(&queue_state, max_seq);
            }
        }
    }
}
2051
John Zulauff89de662020-04-13 18:57:34 -06002052void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2053 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002054 if (VK_SUCCESS != result) return;
2055
2056 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2057 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2058 }
2059}
2060
// Core (Vulkan 1.2) entry point; the shared logic lives in RecordWaitSemaphores.
void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                          VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}

// VK_KHR_timeline_semaphore alias of vkWaitSemaphores.
void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
                                                             uint64_t timeout, VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}
2070
// A successfully queried counter value proves the timeline semaphore reached
// *pValue, so any queue work signaling up to that payload can be retired.
void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;

    RetireTimelineSemaphore(semaphore, *pValue);
}

// Core (Vulkan 1.2) entry point.
void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                                    VkResult result) {
    RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
}
// VK_KHR_timeline_semaphore alias.
void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                                       VkResult result) {
    RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
}
2086
locke-lunargd556cc32019-09-17 01:21:23 -06002087void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2088 if (VK_SUCCESS != result) return;
2089 RetireFence(fence);
2090}
2091
// Create tracking state for a retrieved queue. map::emplace is a no-op if the
// queue was already recorded, so repeated vkGetDeviceQueue calls are harmless.
void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
    queueMap.emplace(queue, QUEUE_STATE(queue, queue_family_index));
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}
2104
2105void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2106 if (VK_SUCCESS != result) return;
2107 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002108 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002109}
2110
2111void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2112 if (VK_SUCCESS != result) return;
2113 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002114 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002115 }
2116}
2117
2118void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2119 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002120 auto fence_state = GetFenceState(fence);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002121 fence_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002122 fenceMap.erase(fence);
2123}
2124
2125void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2126 const VkAllocationCallbacks *pAllocator) {
2127 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002128 auto semaphore_state = GetSemaphoreState(semaphore);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002129 semaphore_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002130 semaphoreMap.erase(semaphore);
2131}
2132
2133void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2134 if (!event) return;
John Zulauf48057322020-12-02 11:59:31 -07002135 EVENT_STATE *event_state = Get<EVENT_STATE>(event);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002136 event_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002137 eventMap.erase(event);
2138}
2139
2140void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2141 const VkAllocationCallbacks *pAllocator) {
2142 if (!queryPool) return;
2143 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002144 qp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002145 queryPoolMap.erase(queryPool);
2146}
2147
locke-lunargd556cc32019-09-17 01:21:23 -06002148void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2149 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2150 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002151 // Track objects tied to memory
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002152 auto mem_state = GetDevMemShared(mem);
2153 if (mem_state) {
2154 buffer_state->SetMemBinding(mem_state, memoryOffset);
2155 }
locke-lunargd556cc32019-09-17 01:21:23 -06002156 }
2157}
2158
// Record a successful vkBindBufferMemory in buffer/memory state tracking.
void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}

// NOTE(review): unlike the non-2 variant above, the *2 handlers do not check
// `result` and record every bind info unconditionally -- confirm this asymmetry
// is intentional.
void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
                                                             const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

// VK_KHR_bind_memory2 alias of vkBindBufferMemory2; identical recording.
void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                                const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}
2178
// Remember that the app queried this buffer's memory requirements; bind-time
// validation checks this flag.
void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        buffer_state->memory_requirements_checked = true;
    }
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer);
}

// Core (Vulkan 1.1) variant.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2 *pInfo,
                                                                        VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}

// VK_KHR_get_memory_requirements2 alias.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2 *pInfo,
                                                                           VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}
2202
// Track which planes (for disjoint multi-planar images) or which image (single
// plane) have had their memory requirements queried; bind-time validation uses
// these flags. pInfo is null for the Vulkan 1.0 vkGetImageMemoryRequirements path.
void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
    const VkImagePlaneMemoryRequirementsInfo *plane_info =
        (pInfo == nullptr) ? nullptr : LvlFindInChain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
    IMAGE_STATE *image_state = GetImageState(image);
    if (image_state) {
        if (plane_info != nullptr) {
            // Multi-plane image: the query covers exactly one plane.
            if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
                image_state->memory_requirements_checked[0] = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
                image_state->memory_requirements_checked[1] = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
                image_state->memory_requirements_checked[2] = true;
            }
        } else if (!image_state->disjoint) {
            // Single Plane image
            // (a disjoint image queried without plane info records nothing)
            image_state->memory_requirements_checked[0] = true;
        }
    }
}
2223
// Vulkan 1.0 path: no pInfo chain, so no per-plane information.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(image, nullptr);
}

// Core (Vulkan 1.1) variant.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}

// VK_KHR_get_memory_requirements2 alias.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}
2239
2240static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2241 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2242 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2243 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2244 image_state->sparse_metadata_required = true;
2245 }
2246}
2247
2248void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2249 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2250 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2251 auto image_state = GetImageState(image);
2252 image_state->get_sparse_reqs_called = true;
2253 if (!pSparseMemoryRequirements) return;
2254 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2255 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2256 }
2257}
2258
2259void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002260 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2261 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002262 auto image_state = GetImageState(pInfo->image);
2263 image_state->get_sparse_reqs_called = true;
2264 if (!pSparseMemoryRequirements) return;
2265 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2266 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2267 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2268 }
2269}
2270
2271void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002272 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2273 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002274 auto image_state = GetImageState(pInfo->image);
2275 image_state->get_sparse_reqs_called = true;
2276 if (!pSparseMemoryRequirements) return;
2277 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2278 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2279 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2280 }
2281}
2282
2283void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2284 const VkAllocationCallbacks *pAllocator) {
2285 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002286 auto shader_module_state = GetShaderModuleState(shaderModule);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002287 shader_module_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002288 shaderModuleMap.erase(shaderModule);
2289}
2290
2291void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2292 const VkAllocationCallbacks *pAllocator) {
2293 if (!pipeline) return;
2294 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
locke-lunargd556cc32019-09-17 01:21:23 -06002295 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002296 pipeline_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002297 pipelineMap.erase(pipeline);
2298}
2299
2300void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2301 const VkAllocationCallbacks *pAllocator) {
2302 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002303 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002304 pipeline_layout_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002305 pipelineLayoutMap.erase(pipelineLayout);
2306}
2307
2308void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2309 const VkAllocationCallbacks *pAllocator) {
2310 if (!sampler) return;
2311 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
locke-lunargd556cc32019-09-17 01:21:23 -06002312 // Any bound cmd buffers are now invalid
2313 if (sampler_state) {
Yuly Novikov424cdd52020-05-26 16:45:12 -04002314 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2315 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
2316 custom_border_color_sampler_count--;
2317 }
2318
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002319 sampler_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002320 }
2321 samplerMap.erase(sampler);
2322}
2323
// Destroy tracking state for a descriptor set layout. The map holds shared_ptrs,
// so lookup + erase via the iterator avoids a second map search.
void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
                                                                     const VkAllocationCallbacks *pAllocator) {
    if (!descriptorSetLayout) return;
    auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
    if (layout_it != descriptorSetLayoutMap.end()) {
        layout_it->second.get()->Destroy();
        descriptorSetLayoutMap.erase(layout_it);
    }
}
2333
// Destroying a pool implicitly frees every descriptor set allocated from it.
void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!descriptorPool) return;
    DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
    if (desc_pool_state) {
        // Free sets that were in this pool
        for (auto *ds : desc_pool_state->sets) {
            FreeDescriptorSet(ds);
        }
        desc_pool_state->Destroy();
        descriptorPoolMap.erase(descriptorPool);
    }
}
2347
// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        // Allow any derived class to clean up command buffer state
        if (command_buffer_reset_callback) {
            (*command_buffer_reset_callback)(command_buffers[i]);
        }
        if (command_buffer_free_callback) {
            (*command_buffer_free_callback)(command_buffers[i]);
        }

        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
            // Remove CBState from CB map
            cb_state->Destroy();
            // NOTE(review): commandBuffer() is read again after Destroy(); this
            // assumes Destroy() leaves the handle accessor valid -- confirm.
            commandBufferMap.erase(cb_state->commandBuffer());
        }
    }
}
2373
2374void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2375 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002376 auto pool = GetCommandPoolState(commandPool);
2377 FreeCommandBufferStates(pool, commandBufferCount, pCommandBuffers);
locke-lunargd556cc32019-09-17 01:21:23 -06002378}
2379
// Create tracking state for a new command pool, capturing the queue flags of the
// pool's queue family for later command-vs-queue-capability validation.
void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    auto queue_flags = GetPhysicalDeviceState()->queue_family_properties[pCreateInfo->queueFamilyIndex].queueFlags;
    commandPoolMap[*pCommandPool] = std::make_shared<COMMAND_POOL_STATE>(*pCommandPool, pCreateInfo, queue_flags);
}
2387
// Create tracking state for a new query pool. Performance-query pools need extra
// bookkeeping: counter count, required pass count, and whether any counter is
// scoped to a command buffer or render pass.
void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;

    uint32_t index_count = 0, n_perf_pass = 0;
    bool has_cb = false, has_rb = false;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        // NOTE(review): `perf` is assumed non-null -- valid usage requires a
        // VkQueryPoolPerformanceCreateInfoKHR in the chain for this query type.
        const auto *perf = LvlFindInChain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
        index_count = perf->counterIndexCount;

        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
            const auto &counter = counters.counters[perf->pCounterIndices[i]];
            switch (counter.scope) {
                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
                    has_cb = true;
                    break;
                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
                    has_rb = true;
                    break;
                default:
                    break;
            }
        }

        // Ask the driver how many submission passes are needed to cover all counters.
        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf, &n_perf_pass);
    }

    queryPoolMap[*pQueryPool] =
        std::make_shared<QUERY_POOL_STATE>(*pQueryPool, pCreateInfo, index_count, n_perf_pass, has_cb, has_rb);

    // Initialize every slot in the pool to the unknown state.
    QueryObject query_obj{*pQueryPool, 0u};
    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
        query_obj.query = i;
        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
    }
}
2426
void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!commandPool) return;
    COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
    // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
    // "When a pool is destroyed, all command buffers allocated from the pool are freed."
    if (cp_state) {
        // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
        std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
        FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
        cp_state->Destroy();
        commandPoolMap.erase(commandPool);
    }
}
2441
2442void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2443 VkCommandPoolResetFlags flags, VkResult result) {
2444 if (VK_SUCCESS != result) return;
2445 // Reset all of the CBs allocated from this pool
2446 auto command_pool_state = GetCommandPoolState(commandPool);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002447 for (auto cmd_buffer : command_pool_state->commandBuffers) {
2448 ResetCommandBufferState(cmd_buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002449 }
2450}
2451
2452void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2453 VkResult result) {
2454 for (uint32_t i = 0; i < fenceCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002455 auto fence_state = GetFenceState(pFences[i]);
2456 if (fence_state) {
2457 if (fence_state->scope == kSyncScopeInternal) {
2458 fence_state->state = FENCE_UNSIGNALED;
2459 } else if (fence_state->scope == kSyncScopeExternalTemporary) {
2460 fence_state->scope = kSyncScopeInternal;
locke-lunargd556cc32019-09-17 01:21:23 -06002461 }
2462 }
2463 }
2464}
2465
locke-lunargd556cc32019-09-17 01:21:23 -06002466void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2467 const VkAllocationCallbacks *pAllocator) {
2468 if (!framebuffer) return;
2469 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002470 framebuffer_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002471 frameBufferMap.erase(framebuffer);
2472}
2473
2474void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2475 const VkAllocationCallbacks *pAllocator) {
2476 if (!renderPass) return;
2477 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002478 rp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002479 renderPassMap.erase(renderPass);
2480}
2481
2482void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2483 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2484 if (VK_SUCCESS != result) return;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002485 fenceMap[*pFence] = std::make_shared<FENCE_STATE>(*pFence, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002486}
2487
// Despite the Validate name, this hook only prepares shared pipeline state (it
// always returns false = no skip). The state is consumed by CoreChecks/GPU-AV
// and by PostCallRecordCreateGraphicsPipelines via cgpl_state_data.
bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                    const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                    void *cgpl_state_data) const {
    // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    cgpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    cgpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
        (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}
2503
2504void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2505 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2506 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2507 VkResult result, void *cgpl_state_data) {
2508 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2509 // This API may create pipelines regardless of the return value
2510 for (uint32_t i = 0; i < count; i++) {
2511 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002512 (cgpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002513 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2514 }
2515 }
2516 cgpl_state->pipe_state.clear();
2517}
2518
2519bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2520 const VkComputePipelineCreateInfo *pCreateInfos,
2521 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002522 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002523 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2524 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2525 ccpl_state->pipe_state.reserve(count);
2526 for (uint32_t i = 0; i < count; i++) {
2527 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002528 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002529 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002530 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002531 }
2532 return false;
2533}
2534
2535void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2536 const VkComputePipelineCreateInfo *pCreateInfos,
2537 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2538 VkResult result, void *ccpl_state_data) {
2539 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2540
2541 // This API may create pipelines regardless of the return value
2542 for (uint32_t i = 0; i < count; i++) {
2543 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002544 (ccpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002545 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2546 }
2547 }
2548 ccpl_state->pipe_state.clear();
2549}
2550
2551bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2552 uint32_t count,
2553 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2554 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002555 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002556 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2557 crtpl_state->pipe_state.reserve(count);
2558 for (uint32_t i = 0; i < count; i++) {
2559 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002560 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002561 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002562 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002563 }
2564 return false;
2565}
2566
2567void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2568 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2569 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2570 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2571 // This API may create pipelines regardless of the return value
2572 for (uint32_t i = 0; i < count; i++) {
2573 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002574 (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002575 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2576 }
2577 }
2578 crtpl_state->pipe_state.clear();
2579}
2580
sourav parmarcd5fb182020-07-17 12:58:44 -07002581bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
2582 VkPipelineCache pipelineCache, uint32_t count,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002583 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2584 const VkAllocationCallbacks *pAllocator,
2585 VkPipeline *pPipelines, void *crtpl_state_data) const {
2586 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2587 crtpl_state->pipe_state.reserve(count);
2588 for (uint32_t i = 0; i < count; i++) {
2589 // Create and initialize internal tracking data structure
2590 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
2591 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
2592 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
2593 }
2594 return false;
2595}
2596
sourav parmarcd5fb182020-07-17 12:58:44 -07002597void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
2598 VkPipelineCache pipelineCache, uint32_t count,
2599 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2600 const VkAllocationCallbacks *pAllocator,
2601 VkPipeline *pPipelines, VkResult result,
2602 void *crtpl_state_data) {
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002603 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2604 // This API may create pipelines regardless of the return value
2605 for (uint32_t i = 0; i < count; i++) {
2606 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002607 (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002608 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2609 }
2610 }
2611 crtpl_state->pipe_state.clear();
2612}
2613
locke-lunargd556cc32019-09-17 01:21:23 -06002614void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2615 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2616 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002617 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002618 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2619 pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
Tony-LunarG7337b312020-04-15 16:40:25 -06002620 custom_border_color_sampler_count++;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002621 }
locke-lunargd556cc32019-09-17 01:21:23 -06002622}
2623
2624void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2625 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2626 const VkAllocationCallbacks *pAllocator,
2627 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2628 if (VK_SUCCESS != result) return;
2629 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2630}
2631
2632// For repeatable sorting, not very useful for "memory in range" search
2633struct PushConstantRangeCompare {
2634 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2635 if (lhs->offset == rhs->offset) {
2636 if (lhs->size == rhs->size) {
2637 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2638 return lhs->stageFlags < rhs->stageFlags;
2639 }
2640 // If the offsets are the same then sorting by the end of range is useful for validation
2641 return lhs->size < rhs->size;
2642 }
2643 return lhs->offset < rhs->offset;
2644 }
2645};
2646
// Interned (canonical-form) push constant range sets, shared across pipeline layouts.
static PushConstantRangesDict push_constant_ranges_dict;
2648
2649PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2650 if (!info->pPushConstantRanges) {
2651 // Hand back the empty entry (creating as needed)...
2652 return push_constant_ranges_dict.look_up(PushConstantRanges());
2653 }
2654
2655 // Sort the input ranges to ensure equivalent ranges map to the same id
2656 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2657 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2658 sorted.insert(info->pPushConstantRanges + i);
2659 }
2660
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07002661 PushConstantRanges ranges;
2662 ranges.reserve(sorted.size());
John Zulauf79f06582021-02-27 18:38:39 -07002663 for (const auto *range : sorted) {
locke-lunargd556cc32019-09-17 01:21:23 -06002664 ranges.emplace_back(*range);
2665 }
2666 return push_constant_ranges_dict.look_up(std::move(ranges));
2667}
2668
// Dictionary of canonical form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2674
// Look up (interning as needed) the canonical "compatible for set N" record for this
// (set index, push-constant ranges, set layouts) triple.
static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}
2679
// Record pipeline layout state, resolving each referenced descriptor set layout and
// interning the canonical forms used for fast "compatible for set N" checks at bind time.
void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkPipelineLayout *pPipelineLayout, VkResult result) {
    if (VK_SUCCESS != result) return;

    auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>(*pPipelineLayout);
    pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
    PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
    // Resolve each set layout handle to its shared state and collect the layout ids
    // needed to canonicalize the whole sequence below.
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
        set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
    }

    // Get canonical form IDs for the "compatible for set" contents
    pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
    auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
    pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);

    // Create table of "compatible for set N" canonical forms for trivial accept validation
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->compat_for_set.emplace_back(
            GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
    }
    pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
}
2705
2706void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2707 const VkAllocationCallbacks *pAllocator,
2708 VkDescriptorPool *pDescriptorPool, VkResult result) {
2709 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002710 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002711}
2712
2713void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2714 VkDescriptorPoolResetFlags flags, VkResult result) {
2715 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002716 DESCRIPTOR_POOL_STATE *pool = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002717 // TODO: validate flags
2718 // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
John Zulauf79f06582021-02-27 18:38:39 -07002719 for (auto *ds : pool->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002720 FreeDescriptorSet(ds);
2721 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002722 pool->sets.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06002723 // Reset available count for each type and available sets for this pool
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002724 for (auto it = pool->availableDescriptorTypeCount.begin(); it != pool->availableDescriptorTypeCount.end(); ++it) {
2725 pool->availableDescriptorTypeCount[it->first] = pool->maxDescriptorTypeCount[it->first];
locke-lunargd556cc32019-09-17 01:21:23 -06002726 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002727 pool->availableSets = pool->maxSets;
locke-lunargd556cc32019-09-17 01:21:23 -06002728}
2729
2730bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2731 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002732 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002733 // Always update common data
2734 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2735 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2736 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2737
2738 return false;
2739}
2740
2741// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
2742void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2743 VkDescriptorSet *pDescriptorSets, VkResult result,
2744 void *ads_state_data) {
2745 if (VK_SUCCESS != result) return;
2746 // All the updates are contained in a single cvdescriptorset function
2747 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2748 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2749 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2750}
2751
2752void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2753 const VkDescriptorSet *pDescriptorSets) {
2754 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2755 // Update available descriptor sets in pool
2756 pool_state->availableSets += count;
2757
2758 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
2759 for (uint32_t i = 0; i < count; ++i) {
2760 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2761 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2762 uint32_t type_index = 0, descriptor_count = 0;
2763 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2764 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2765 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2766 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2767 }
2768 FreeDescriptorSet(descriptor_set);
2769 pool_state->sets.erase(descriptor_set);
2770 }
2771 }
2772}
2773
// Apply descriptor writes and copies to tracked set state by delegating to the
// cvdescriptorset update machinery.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}
2781
2782void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2783 VkCommandBuffer *pCommandBuffer, VkResult result) {
2784 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002785 auto pool = GetCommandPoolShared(pCreateInfo->commandPool);
2786 if (pool) {
locke-lunargd556cc32019-09-17 01:21:23 -06002787 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2788 // Add command buffer to its commandPool map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002789 pool->commandBuffers.insert(pCommandBuffer[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002790 auto cb_state = std::make_shared<CMD_BUFFER_STATE>(pCommandBuffer[i], pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002791 cb_state->command_pool = pool;
2792 cb_state->unprotected = pool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06002793 // Add command buffer to map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002794 commandBufferMap[pCommandBuffer[i]] = std::move(cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002795 ResetCommandBufferState(pCommandBuffer[i]);
2796 }
2797 }
2798}
2799
// Record, per framebuffer attachment referenced by |subpass|, how the subpass uses it
// (input / color / resolve / depth-stencil) together with the layout that reference declares.
// NOTE: |subpasses| is indexed by *attachment* index, not by subpass index.
void UpdateSubpassAttachments(const safe_VkSubpassDescription2 &subpass, std::vector<SUBPASS_INFO> &subpasses) {
    // Input attachments
    for (uint32_t index = 0; index < subpass.inputAttachmentCount; ++index) {
        const uint32_t attachment_index = subpass.pInputAttachments[index].attachment;
        if (attachment_index != VK_ATTACHMENT_UNUSED) {
            subpasses[attachment_index].used = true;
            // NOTE(review): usage is assigned, not OR'd — an attachment referenced both here
            // and as a color attachment below keeps only the later usage bit. Confirm
            // consumers only need the dominant usage.
            subpasses[attachment_index].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
            subpasses[attachment_index].layout = subpass.pInputAttachments[index].layout;
        }
    }

    // Color attachments, plus their optional resolve attachments (parallel arrays of the
    // same colorAttachmentCount length).
    for (uint32_t index = 0; index < subpass.colorAttachmentCount; ++index) {
        const uint32_t attachment_index = subpass.pColorAttachments[index].attachment;
        if (attachment_index != VK_ATTACHMENT_UNUSED) {
            subpasses[attachment_index].used = true;
            subpasses[attachment_index].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
            subpasses[attachment_index].layout = subpass.pColorAttachments[index].layout;
        }
        if (subpass.pResolveAttachments) {
            const uint32_t attachment_index2 = subpass.pResolveAttachments[index].attachment;
            if (attachment_index2 != VK_ATTACHMENT_UNUSED) {
                subpasses[attachment_index2].used = true;
                subpasses[attachment_index2].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
                subpasses[attachment_index2].layout = subpass.pResolveAttachments[index].layout;
            }
        }
    }

    // Depth/stencil attachment (at most one per subpass).
    if (subpass.pDepthStencilAttachment) {
        const uint32_t attachment_index = subpass.pDepthStencilAttachment->attachment;
        if (attachment_index != VK_ATTACHMENT_UNUSED) {
            subpasses[attachment_index].used = true;
            subpasses[attachment_index].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
            subpasses[attachment_index].layout = subpass.pDepthStencilAttachment->layout;
        }
    }
}
2836
// Resolve the IMAGE_VIEW_STATE pointer for each attachment of the active render pass
// instance. For imageless framebuffers the views come from the
// VkRenderPassAttachmentBeginInfo chained off pRenderPassBegin; otherwise they come from
// the framebuffer's own attachment list. Each view's shared_ptr is parked in
// cb_state.attachments_view_states so the raw pointers stored in active_attachments
// remain valid for the lifetime of the recording.
void UpdateAttachmentsView(ValidationStateTracker &tracker, CMD_BUFFER_STATE &cb_state, const FRAMEBUFFER_STATE &framebuffer,
                           const VkRenderPassBeginInfo *pRenderPassBegin) {
    auto &attachments = *(cb_state.active_attachments.get());
    const bool imageless = (framebuffer.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) ? true : false;
    const VkRenderPassAttachmentBeginInfo *attachment_info_struct = nullptr;
    if (pRenderPassBegin) attachment_info_struct = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);

    for (uint32_t i = 0; i < attachments.size(); ++i) {
        if (imageless) {
            // Imageless: views supplied at vkCmdBeginRenderPass time; entries beyond
            // attachmentCount are left unchanged.
            if (attachment_info_struct && i < attachment_info_struct->attachmentCount) {
                auto res = cb_state.attachments_view_states.insert(
                    tracker.GetShared<IMAGE_VIEW_STATE>(attachment_info_struct->pAttachments[i]));
                attachments[i] = res.first->get();
            }
        } else {
            auto res = cb_state.attachments_view_states.insert(framebuffer.attachments_view_state[i]);
            attachments[i] = res.first->get();
        }
    }
}
2857
// Transition the command buffer into the recording state and establish all state implied
// by pBeginInfo: implicit reset of a previously recorded buffer, secondary-CB inheritance
// (render pass, subpass, framebuffer, attachment views), inherited viewport/scissor state
// (VK_NV_inherited_viewport_scissor), and the initial device-group mask.
void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;

    // vkBeginCommandBuffer on an already-recorded buffer performs an implicit reset.
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo && (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
        // Copy the inheritance info and re-point beginInfo at the owned copy so it
        // outlives the caller's stack struct.
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command-buffer and inheriting. Update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;

            if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
                cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
                cb_state->active_subpasses = nullptr;
                cb_state->active_attachments = nullptr;

                if (cb_state->activeFramebuffer) {
                    cb_state->framebuffers.insert(cb_state->activeFramebuffer);

                    // Set cb_state->active_subpasses
                    cb_state->active_subpasses =
                        std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
                    const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
                    UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);

                    // Set cb_state->active_attachments & cb_state->attachments_view_states
                    cb_state->active_attachments =
                        std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
                    UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, nullptr);

                    // Connect this framebuffer and its children to this cmdBuffer
                    if (!disabled[command_buffer_state]) {
                        cb_state->AddChild(cb_state->activeFramebuffer.get());
                    }
                }
            }

            // Check for VkCommandBufferInheritanceViewportScissorInfoNV (VK_NV_inherited_viewport_scissor)
            auto p_inherited_viewport_scissor_info =
                LvlFindInChain<VkCommandBufferInheritanceViewportScissorInfoNV>(cb_state->beginInfo.pInheritanceInfo->pNext);
            if (p_inherited_viewport_scissor_info != nullptr && p_inherited_viewport_scissor_info->viewportScissor2D) {
                auto pViewportDepths = p_inherited_viewport_scissor_info->pViewportDepths;
                cb_state->inheritedViewportDepths.assign(
                    pViewportDepths, pViewportDepths + p_inherited_viewport_scissor_info->viewportDepthCount);
            }
        }
    }

    // Device groups: record which physical devices this CB initially targets; default is
    // all devices in the group when no VkDeviceGroupCommandBufferBeginInfo is chained.
    auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    cb_state->performance_lock_acquired = performance_lock_acquired;
}
2924
2925void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2926 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2927 if (!cb_state) return;
2928 // Cached validation is specific to a specific recording of a specific command buffer.
John Zulauf79f06582021-02-27 18:38:39 -07002929 for (auto *descriptor_set : cb_state->validated_descriptor_sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002930 descriptor_set->ClearCachedValidation(cb_state);
2931 }
2932 cb_state->validated_descriptor_sets.clear();
2933 if (VK_SUCCESS == result) {
2934 cb_state->state = CB_RECORDED;
2935 }
2936}
2937
2938void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
2939 VkResult result) {
2940 if (VK_SUCCESS == result) {
2941 ResetCommandBufferState(commandBuffer);
2942 }
2943}
2944
2945CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
2946 // initially assume everything is static state
2947 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
2948
2949 if (ds) {
2950 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
locke-lunarg4189aa22020-10-21 00:23:48 -06002951 flags &= ~ConvertToCBStatusFlagBits(ds->pDynamicStates[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002952 }
2953 }
locke-lunargd556cc32019-09-17 01:21:23 -06002954 return flags;
2955}
2956
2957// Validation cache:
2958// CV is the bottommost implementor of this extension. Don't pass calls down.
2959// utility function to set collective state for pipeline
2960void SetPipelineState(PIPELINE_STATE *pPipe) {
2961 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
2962 if (pPipe->graphicsPipelineCI.pColorBlendState) {
2963 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
2964 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
2965 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2966 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2967 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2968 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2969 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2970 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2971 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2972 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
2973 pPipe->blendConstantsEnabled = true;
2974 }
2975 }
2976 }
2977 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07002978 // Check if sample location is enabled
2979 if (pPipe->graphicsPipelineCI.pMultisampleState) {
2980 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002981 LvlFindInChain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
sfricke-samsung8f658d42020-05-03 20:12:24 -07002982 if (sample_location_state != nullptr) {
2983 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
2984 }
2985 }
locke-lunargd556cc32019-09-17 01:21:23 -06002986}
2987
// Track a pipeline bind: recompute the static/dynamic state masks for graphics binds,
// update the viewport/scissor bookkeeping used by draw-time validation, record the
// last-bound pipeline for the bind point, and invalidate cached sampler/descriptor data.
void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
        bool rasterization_enabled = VK_FALSE == pipe_state->graphicsPipelineCI.ptr()->pRasterizationState->rasterizerDiscardEnable;
        const auto* viewport_state = pipe_state->graphicsPipelineCI.ptr()->pViewportState;
        const auto* dynamic_state = pipe_state->graphicsPipelineCI.ptr()->pDynamicState;
        // Drop status bits owned by the previous pipeline's static state, then mark this
        // pipeline's static state as set; everything else is dynamic.
        cb_state->status &= ~cb_state->static_status;
        cb_state->static_status = MakeStaticStateMask(dynamic_state);
        cb_state->status |= cb_state->static_status;
        cb_state->dynamic_status = CBSTATUS_ALL_STATE_SET & (~cb_state->static_status);

        // Used to calculate CMD_BUFFER_STATE::usedViewportScissorCount upon draw command with this graphics pipeline.
        // If rasterization disabled (no viewport/scissors used), or the actual number of viewports/scissors is dynamic (unknown at
        // this time), then these are set to 0 to disable this checking.
        auto has_dynamic_viewport_count = cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET;
        auto has_dynamic_scissor_count = cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET;
        cb_state->pipelineStaticViewportCount =
            has_dynamic_viewport_count || !rasterization_enabled ? 0 : viewport_state->viewportCount;
        cb_state->pipelineStaticScissorCount =
            has_dynamic_scissor_count || !rasterization_enabled ? 0 : viewport_state->scissorCount;

        // Trash dynamic viewport/scissor state if pipeline defines static state and enabled rasterization.
        // akeley98 NOTE: There's a bit of an ambiguity in the spec, whether binding such a pipeline overwrites
        // the entire viewport (scissor) array, or only the subsection defined by the viewport (scissor) count.
        // I am taking the latter interpretation based on the implementation details of NVIDIA's Vulkan driver.
        if (!has_dynamic_viewport_count) {
            cb_state->trashedViewportCount = true;
            if (rasterization_enabled && (cb_state->static_status & CBSTATUS_VIEWPORT_SET)) {
                cb_state->trashedViewportMask |= (uint32_t(1) << viewport_state->viewportCount) - 1u;
                // should become = ~uint32_t(0) if the other interpretation is correct.
            }
        }
        if (!has_dynamic_scissor_count) {
            cb_state->trashedScissorCount = true;
            if (rasterization_enabled && (cb_state->static_status & CBSTATUS_SCISSOR_SET)) {
                cb_state->trashedScissorMask |= (uint32_t(1) << viewport_state->scissorCount) - 1u;
                // should become = ~uint32_t(0) if the other interpretation is correct.
            }
        }
    }
    const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
    cb_state->lastBound[lv_bind_point].pipeline_state = pipe_state;
    SetPipelineState(pipe_state);
    if (!disabled[command_buffer_state]) {
        cb_state->AddChild(pipe_state);
    }
    // Invalidate cached sampler-used-by-image pointers; they are re-resolved below.
    for (auto &slot : pipe_state->active_slots) {
        for (auto &req : slot.second) {
            for (auto &sampler : req.second.samplers_used_by_image) {
                for (auto &des : sampler) {
                    des.second = nullptr;
                }
            }
        }
    }
    cb_state->lastBound[lv_bind_point].UpdateSamplerDescriptorsUsedByImage();
}
3049
3050void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3051 uint32_t viewportCount, const VkViewport *pViewports) {
3052 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003053 uint32_t bits = ((1u << viewportCount) - 1u) << firstViewport;
3054 cb_state->viewportMask |= bits;
3055 cb_state->trashedViewportMask &= ~bits;
locke-lunargd556cc32019-09-17 01:21:23 -06003056 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003057 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003058
3059 cb_state->dynamicViewports.resize(std::max(size_t(firstViewport + viewportCount), cb_state->dynamicViewports.size()));
3060 for (size_t i = 0; i < viewportCount; ++i) {
3061 cb_state->dynamicViewports[firstViewport + i] = pViewports[i];
3062 }
locke-lunargd556cc32019-09-17 01:21:23 -06003063}
3064
3065void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3066 uint32_t exclusiveScissorCount,
3067 const VkRect2D *pExclusiveScissors) {
3068 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3069 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3070 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3071 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003072 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003073}
3074
3075void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3076 VkImageLayout imageLayout) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003077 if (disabled[command_buffer_state]) return;
3078
locke-lunargd556cc32019-09-17 01:21:23 -06003079 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3080
3081 if (imageView != VK_NULL_HANDLE) {
3082 auto view_state = GetImageViewState(imageView);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003083 cb_state->AddChild(view_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003084 }
3085}
3086
3087void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3088 uint32_t viewportCount,
3089 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3090 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3091 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3092 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3093 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003094 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003095}
3096
3097void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3098 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3099 const VkAllocationCallbacks *pAllocator,
3100 VkAccelerationStructureNV *pAccelerationStructure,
3101 VkResult result) {
3102 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003103 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003104
3105 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003106 auto as_memory_requirements_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003107 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003108 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003109 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3110
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003111 auto scratch_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003112 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003113 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003114 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3115 &as_state->build_scratch_memory_requirements);
3116
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003117 auto update_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003118 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003119 update_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003120 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3121 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003122 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003123 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3124}
3125
Jeff Bolz95176d02020-04-01 00:36:16 -05003126void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3127 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3128 const VkAllocationCallbacks *pAllocator,
3129 VkAccelerationStructureKHR *pAccelerationStructure,
3130 VkResult result) {
3131 if (VK_SUCCESS != result) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003132 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE_KHR>(*pAccelerationStructure, pCreateInfo);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003133 as_state->allocator = pAllocator;
sourav parmarcd5fb182020-07-17 12:58:44 -07003134 accelerationStructureMap_khr[*pAccelerationStructure] = std::move(as_state);
Jeff Bolz95176d02020-04-01 00:36:16 -05003135}
3136
sourav parmarcd5fb182020-07-17 12:58:44 -07003137void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresKHR(
3138 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3139 const VkAccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos) {
3140 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3141 if (cb_state == nullptr) {
3142 return;
3143 }
3144 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003145 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003146 if (dst_as_state != nullptr) {
3147 dst_as_state->built = true;
3148 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003149 if (!disabled[command_buffer_state]) {
3150 cb_state->AddChild(dst_as_state);
3151 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003152 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003153 if (!disabled[command_buffer_state]) {
3154 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3155 if (src_as_state != nullptr) {
3156 cb_state->AddChild(src_as_state);
3157 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003158 }
3159 }
3160 cb_state->hasBuildAccelerationStructureCmd = true;
3161}
3162
3163void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresIndirectKHR(
3164 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3165 const VkDeviceAddress *pIndirectDeviceAddresses, const uint32_t *pIndirectStrides,
3166 const uint32_t *const *ppMaxPrimitiveCounts) {
3167 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3168 if (cb_state == nullptr) {
3169 return;
3170 }
3171 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003172 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003173 if (dst_as_state != nullptr) {
3174 dst_as_state->built = true;
3175 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003176 if (!disabled[command_buffer_state]) {
3177 cb_state->AddChild(dst_as_state);
3178 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003179 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003180 if (!disabled[command_buffer_state]) {
3181 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3182 if (src_as_state != nullptr) {
3183 cb_state->AddChild(src_as_state);
3184 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003185 }
3186 }
3187 cb_state->hasBuildAccelerationStructureCmd = true;
3188}
locke-lunargd556cc32019-09-17 01:21:23 -06003189void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
Mike Schuchardt2df08912020-12-15 16:28:09 -08003190 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2 *pMemoryRequirements) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003191 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(pInfo->accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003192 if (as_state != nullptr) {
3193 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3194 as_state->memory_requirements = *pMemoryRequirements;
3195 as_state->memory_requirements_checked = true;
3196 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3197 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3198 as_state->build_scratch_memory_requirements_checked = true;
3199 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3200 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3201 as_state->update_scratch_memory_requirements_checked = true;
3202 }
3203 }
3204}
3205
sourav parmarcd5fb182020-07-17 12:58:44 -07003206void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3207 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06003208 if (VK_SUCCESS != result) return;
3209 for (uint32_t i = 0; i < bindInfoCount; i++) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003210 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003211
sourav parmarcd5fb182020-07-17 12:58:44 -07003212 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(info.accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003213 if (as_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06003214 // Track objects tied to memory
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003215 auto mem_state = GetDevMemShared(info.memory);
3216 if (mem_state) {
3217 as_state->SetMemBinding(mem_state, info.memoryOffset);
3218 }
locke-lunargd556cc32019-09-17 01:21:23 -06003219
3220 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003221 // XXX TODO: Query device address for KHR extension
sourav parmarcd5fb182020-07-17 12:58:44 -07003222 if (enabled[gpu_validation]) {
locke-lunargd556cc32019-09-17 01:21:23 -06003223 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3224 }
3225 }
3226 }
3227}
3228
3229void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3230 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3231 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3232 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3233 if (cb_state == nullptr) {
3234 return;
3235 }
3236
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003237 auto *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003238 if (dst_as_state != nullptr) {
3239 dst_as_state->built = true;
3240 dst_as_state->build_info.initialize(pInfo);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003241 if (!disabled[command_buffer_state]) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003242 cb_state->AddChild(dst_as_state);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003243 }
locke-lunargd556cc32019-09-17 01:21:23 -06003244 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003245 if (!disabled[command_buffer_state]) {
3246 auto *src_as_state = GetAccelerationStructureStateNV(src);
3247 if (src_as_state != nullptr) {
3248 cb_state->AddChild(src_as_state);
3249 }
locke-lunargd556cc32019-09-17 01:21:23 -06003250 }
3251 cb_state->hasBuildAccelerationStructureCmd = true;
3252}
3253
3254void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3255 VkAccelerationStructureNV dst,
3256 VkAccelerationStructureNV src,
3257 VkCopyAccelerationStructureModeNV mode) {
3258 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3259 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003260 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
3261 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003262 if (dst_as_state != nullptr && src_as_state != nullptr) {
3263 dst_as_state->built = true;
3264 dst_as_state->build_info = src_as_state->build_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003265 if (!disabled[command_buffer_state]) {
3266 cb_state->AddChild(dst_as_state);
3267 cb_state->AddChild(src_as_state);
3268 }
locke-lunargd556cc32019-09-17 01:21:23 -06003269 }
3270 }
3271}
3272
Jeff Bolz95176d02020-04-01 00:36:16 -05003273void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3274 VkAccelerationStructureKHR accelerationStructure,
3275 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003276 if (!accelerationStructure) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003277 auto *as_state = GetAccelerationStructureStateKHR(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003278 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003279 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003280 accelerationStructureMap_khr.erase(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003281 }
3282}
3283
Jeff Bolz95176d02020-04-01 00:36:16 -05003284void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3285 VkAccelerationStructureNV accelerationStructure,
3286 const VkAllocationCallbacks *pAllocator) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003287 if (!accelerationStructure) return;
3288 auto *as_state = GetAccelerationStructureStateNV(accelerationStructure);
3289 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003290 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003291 accelerationStructureMap.erase(accelerationStructure);
3292 }
Jeff Bolz95176d02020-04-01 00:36:16 -05003293}
3294
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003295void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3296 uint32_t viewportCount,
3297 const VkViewportWScalingNV *pViewportWScalings) {
3298 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3299 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003300 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003301}
3302
locke-lunargd556cc32019-09-17 01:21:23 -06003303void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3304 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3305 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003306 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003307}
3308
3309void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3310 uint16_t lineStipplePattern) {
3311 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3312 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003313 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003314}
3315
3316void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3317 float depthBiasClamp, float depthBiasSlopeFactor) {
3318 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3319 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003320 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003321}
3322
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003323void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3324 const VkRect2D *pScissors) {
3325 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003326 uint32_t bits = ((1u << scissorCount) - 1u) << firstScissor;
3327 cb_state->scissorMask |= bits;
3328 cb_state->trashedScissorMask &= ~bits;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003329 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003330 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003331}
3332
locke-lunargd556cc32019-09-17 01:21:23 -06003333void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3334 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3335 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003336 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003337}
3338
3339void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3340 float maxDepthBounds) {
3341 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3342 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003343 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003344}
3345
3346void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3347 uint32_t compareMask) {
3348 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3349 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003350 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003351}
3352
3353void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3354 uint32_t writeMask) {
3355 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3356 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003357 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003358}
3359
3360void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3361 uint32_t reference) {
3362 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3363 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003364 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003365}
3366
// Update the lastBound state for pipeline_bind_point, applying the "Pipeline Layout
// Compatibility" rules: previously bound sets stay valid only while their recorded per-set
// compat_id matches the new layout's; the first mismatch "disturbs" that set and everything
// bound at a higher index.
// Exactly one of pDescriptorSets / push_descriptor_set must be nullptr -- the former is used
// when called for CmdBindDescriptorSets, the latter for CmdPushDescriptorSet.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    // required_size: number of per_set entries needed to hold the incoming range.
    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    const auto lv_bind_point = ConvertToLvlBindPoint(pipeline_bind_point);
    auto &last_bound = cb_state->lastBound[lv_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else.
    // If ds is the currently bound push descriptor set, release last_bound's reference to it
    // and return true; otherwise return false.
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" sets before and after the range being written.
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last; we'll shrink below, but first need to check for and clean up the
            // push descriptor set if it lives in the soon-to-be-dropped tail (at most one can exist).
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed.
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets below first_set, mark them "invalid" if they were disturbed by this update
    // (compat id mismatch), recording the new layout's compat id for each.
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets.
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only clean up the push descriptor set if it isn't the set being (re)bound here.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                // Consume this set's slice of the flattened dynamic-offset array.
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
3461
3462// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3463void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3464 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3465 uint32_t firstSet, uint32_t setCount,
3466 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3467 const uint32_t *pDynamicOffsets) {
3468 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3469 auto pipeline_layout = GetPipelineLayout(layout);
3470
3471 // Resize binding arrays
3472 uint32_t last_set_index = firstSet + setCount - 1;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003473 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3474 if (last_set_index >= cb_state->lastBound[lv_bind_point].per_set.size()) {
3475 cb_state->lastBound[lv_bind_point].per_set.resize(last_set_index + 1);
locke-lunargd556cc32019-09-17 01:21:23 -06003476 }
3477
3478 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3479 dynamicOffsetCount, pDynamicOffsets);
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003480 cb_state->lastBound[lv_bind_point].pipeline_layout = layout;
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06003481 cb_state->lastBound[lv_bind_point].UpdateSamplerDescriptorsUsedByImage();
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003482}
3483
// Record the state effects of a push-descriptor update: ensure the command buffer owns a
// push descriptor set compatible with 'layout' at index 'set', bind it there, and apply the
// writes to it.
void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates: 'set' must exist in the layout and be a push-descriptor set layout.
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor()) {
        return;
    }

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto& dsl = pipeline_layout->set_layouts[set];
    const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
    auto &last_bound = cb_state->lastBound[lv_bind_point];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set (none yet, or incompatible at 'set'),
    // replace it with a freshly allocated one. NOTE(review): the raw 'new' appears to hand
    // ownership to UnbindAndResetPushDescriptorSet / last_bound -- confirm against its definition.
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(cb_state, new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    // Bind the (new or extant) push descriptor set at index 'set'.
    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}
3511
3512void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3513 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3514 uint32_t set, uint32_t descriptorWriteCount,
3515 const VkWriteDescriptorSet *pDescriptorWrites) {
3516 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3517 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3518}
3519
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003520void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3521 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3522 const void *pValues) {
3523 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3524 if (cb_state != nullptr) {
3525 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3526
3527 auto &push_constant_data = cb_state->push_constant_data;
3528 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3529 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06003530 cb_state->push_constant_pipeline_layout_set = layout;
3531
3532 auto flags = stageFlags;
3533 uint32_t bit_shift = 0;
3534 while (flags) {
3535 if (flags & 1) {
3536 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
3537 const auto it = cb_state->push_constant_data_update.find(flag);
3538
3539 if (it != cb_state->push_constant_data_update.end()) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06003540 std::memset(it->second.data() + offset, PC_Byte_Updated, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06003541 }
3542 }
3543 flags = flags >> 1;
3544 ++bit_shift;
3545 }
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003546 }
3547}
3548
locke-lunargd556cc32019-09-17 01:21:23 -06003549void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3550 VkIndexType indexType) {
locke-lunargd556cc32019-09-17 01:21:23 -06003551 auto cb_state = GetCBState(commandBuffer);
3552
3553 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
Piers Daniell39842ee2020-07-10 16:42:33 -06003554 cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
locke-lunarg1ae57d62020-11-18 10:49:19 -07003555 cb_state->index_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(buffer);
3556 cb_state->index_buffer_binding.size = cb_state->index_buffer_binding.buffer_state->createInfo.size;
locke-lunargd556cc32019-09-17 01:21:23 -06003557 cb_state->index_buffer_binding.offset = offset;
3558 cb_state->index_buffer_binding.index_type = indexType;
3559 // Add binding for this index buffer to this commandbuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003560 if (!disabled[command_buffer_state]) {
3561 cb_state->AddChild(cb_state->index_buffer_binding.buffer_state.get());
3562 }
locke-lunargd556cc32019-09-17 01:21:23 -06003563}
3564
3565void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3566 uint32_t bindingCount, const VkBuffer *pBuffers,
3567 const VkDeviceSize *pOffsets) {
3568 auto cb_state = GetCBState(commandBuffer);
3569
3570 uint32_t end = firstBinding + bindingCount;
3571 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3572 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3573 }
3574
3575 for (uint32_t i = 0; i < bindingCount; ++i) {
3576 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07003577 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003578 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06003579 vertex_buffer_binding.size = VK_WHOLE_SIZE;
3580 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06003581 // Add binding for this vertex buffer to this commandbuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003582 if (pBuffers[i] && !disabled[command_buffer_state]) {
3583 cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
Jeff Bolz165818a2020-05-08 11:19:03 -05003584 }
locke-lunargd556cc32019-09-17 01:21:23 -06003585 }
3586}
3587
3588void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3589 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003590 if (disabled[command_buffer_state]) return;
3591
locke-lunargd556cc32019-09-17 01:21:23 -06003592 auto cb_state = GetCBState(commandBuffer);
3593 auto dst_buffer_state = GetBufferState(dstBuffer);
3594
3595 // Update bindings between buffer and cmd buffer
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003596 if (cb_state && dst_buffer_state) {
3597 cb_state->AddChild(dst_buffer_state);
3598 }
locke-lunargd556cc32019-09-17 01:21:23 -06003599}
3600
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06003601static bool SetEventStageMask(VkEvent event, VkPipelineStageFlags2KHR stageMask,
Jeff Bolz310775c2019-10-09 00:46:33 -05003602 EventToStageMap *localEventToStageMap) {
3603 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06003604 return false;
3605}
3606
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003607void ValidationStateTracker::RecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06003608 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003609 if (!disabled[command_buffer_state]) {
3610 auto event_state = GetEventState(event);
3611 if (event_state) {
3612 cb_state->AddChild(event_state);
3613 }
locke-lunargd556cc32019-09-17 01:21:23 -06003614 }
3615 cb_state->events.push_back(event);
3616 if (!cb_state->waitedEvents.count(event)) {
3617 cb_state->writeEventsBeforeWait.push_back(event);
3618 }
Jeff Bolz310775c2019-10-09 00:46:33 -05003619 cb_state->eventUpdates.emplace_back(
3620 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
3621 return SetEventStageMask(event, stageMask, localEventToStageMap);
3622 });
locke-lunargd556cc32019-09-17 01:21:23 -06003623}
3624
// vkCmdSetEvent (synchronization1): the 32-bit stage mask widens implicitly to
// VkPipelineStageFlags2KHR; state recording is shared with the sync2 path.
void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    RecordCmdSetEvent(commandBuffer, event, stageMask);
}
3629
3630void ValidationStateTracker::PreCallRecordCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
3631 const VkDependencyInfoKHR *pDependencyInfo) {
3632 auto stage_masks = sync_utils::GetGlobalStageMasks(*pDependencyInfo);
3633
3634 RecordCmdSetEvent(commandBuffer, event, stage_masks.src);
Jeremy Gebben79649152021-06-22 14:46:24 -06003635
3636 RecordBarriers(commandBuffer, pDependencyInfo);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003637}
3638
3639void ValidationStateTracker::RecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3640 VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06003641 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003642 if (!disabled[command_buffer_state]) {
3643 auto event_state = GetEventState(event);
3644 if (event_state) {
3645 cb_state->AddChild(event_state);
3646 }
locke-lunargd556cc32019-09-17 01:21:23 -06003647 }
3648 cb_state->events.push_back(event);
3649 if (!cb_state->waitedEvents.count(event)) {
3650 cb_state->writeEventsBeforeWait.push_back(event);
3651 }
3652
3653 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003654 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003655 return SetEventStageMask(event, VkPipelineStageFlags2KHR(0), localEventToStageMap);
Jeff Bolz310775c2019-10-09 00:46:33 -05003656 });
locke-lunargd556cc32019-09-17 01:21:23 -06003657}
3658
// vkCmdResetEvent (synchronization1): shared recording with the sync2 path.
void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    RecordCmdResetEvent(commandBuffer, event, stageMask);
}
3663
// vkCmdResetEvent2KHR (synchronization2): shared recording with the sync1 path.
void ValidationStateTracker::PreCallRecordCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
                                                            VkPipelineStageFlags2KHR stageMask) {
    RecordCmdResetEvent(commandBuffer, event, stageMask);
}
3668
3669void ValidationStateTracker::RecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents) {
locke-lunargd556cc32019-09-17 01:21:23 -06003670 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3671 for (uint32_t i = 0; i < eventCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003672 if (!disabled[command_buffer_state]) {
3673 auto event_state = GetEventState(pEvents[i]);
3674 if (event_state) {
3675 cb_state->AddChild(event_state);
3676 }
locke-lunargd556cc32019-09-17 01:21:23 -06003677 }
3678 cb_state->waitedEvents.insert(pEvents[i]);
3679 cb_state->events.push_back(pEvents[i]);
3680 }
3681}
3682
// vkCmdWaitEvents (synchronization1): record the waited events, then track the
// buffers/images referenced by the accompanying memory barriers.
void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                        uint32_t bufferMemoryBarrierCount,
                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                        uint32_t imageMemoryBarrierCount,
                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
    RecordBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers,
                   imageMemoryBarrierCount, pImageMemoryBarriers);
}
3694
3695void ValidationStateTracker::PreCallRecordCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount,
3696 const VkEvent *pEvents, const VkDependencyInfoKHR *pDependencyInfos) {
3697 RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
Jeremy Gebben79649152021-06-22 14:46:24 -06003698 for (uint32_t i = 0; i < eventCount; i++) {
3699 RecordBarriers(commandBuffer, &pDependencyInfos[i]);
3700 }
3701}
3702
// vkCmdPipelineBarrier (synchronization1): only the resources named by the
// buffer/image barriers need tracking; the stage masks are not recorded here.
void ValidationStateTracker::PostCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                                              VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                                              uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                              uint32_t bufferMemoryBarrierCount,
                                                              const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                              uint32_t imageMemoryBarrierCount,
                                                              const VkImageMemoryBarrier *pImageMemoryBarriers) {
    RecordBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers,
                   imageMemoryBarrierCount, pImageMemoryBarriers);
}
3713
// vkCmdPipelineBarrier2KHR (synchronization2): track barrier-referenced resources.
void ValidationStateTracker::PreCallRecordCmdPipelineBarrier2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkDependencyInfoKHR *pDependencyInfo) {
    RecordBarriers(commandBuffer, pDependencyInfo);
}
3718
3719void ValidationStateTracker::RecordBarriers(VkCommandBuffer commandBuffer, uint32_t memoryBarrierCount,
3720 const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
3721 const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
3722 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3723 if (disabled[command_buffer_state]) return;
3724
3725 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3726 for (uint32_t i = 0; i < bufferMemoryBarrierCount; i++) {
3727 auto buffer_state = GetBufferState(pBufferMemoryBarriers[i].buffer);
3728 if (buffer_state) {
3729 cb_state->AddChild(buffer_state);
3730 }
3731 }
3732 for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
3733 auto image_state = GetImageState(pImageMemoryBarriers[i].image);
3734 if (image_state) {
3735 cb_state->AddChild(image_state);
3736 }
3737 }
3738}
3739
3740void ValidationStateTracker::RecordBarriers(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR *pDependencyInfo) {
3741 if (disabled[command_buffer_state]) return;
3742
3743 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3744 for (uint32_t i = 0; i < pDependencyInfo->bufferMemoryBarrierCount; i++) {
3745 auto buffer_state = GetBufferState(pDependencyInfo->pBufferMemoryBarriers[i].buffer);
3746 if (buffer_state) {
3747 cb_state->AddChild(buffer_state);
3748 }
3749 }
3750 for (uint32_t i = 0; i < pDependencyInfo->imageMemoryBarrierCount; i++) {
3751 auto image_state = GetImageState(pDependencyInfo->pImageMemoryBarriers[i].image);
3752 if (image_state) {
3753 cb_state->AddChild(image_state);
3754 }
3755 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003756}
3757
// Record |value| as the state of |object| in the per-submit query-state map.
// Always returns false so callers can return it directly as a "skip" result.
bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
    (*localQueryToStateMap)[object] = value;
    return false;
}
3762
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003763bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
3764 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05003765 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003766 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05003767 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06003768 }
3769 return false;
3770}
3771
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003772QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
3773 uint32_t perfPass) const {
3774 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06003775
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003776 auto iter = localQueryToStateMap->find(query);
3777 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05003778
Jeff Bolz310775c2019-10-09 00:46:33 -05003779 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06003780}
3781
3782void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003783 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003784 cb_state->activeQueries.insert(query_obj);
3785 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003786 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3787 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3788 QueryMap *localQueryToStateMap) {
3789 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
3790 return false;
3791 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003792 if (!disabled[command_buffer_state]) {
3793 auto pool_state = GetQueryPoolState(query_obj.pool);
3794 cb_state->AddChild(pool_state);
3795 }
locke-lunargd556cc32019-09-17 01:21:23 -06003796}
3797
3798void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3799 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003800 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003801 QueryObject query = {queryPool, slot};
3802 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3803 RecordCmdBeginQuery(cb_state, query);
3804}
3805
3806void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003807 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003808 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003809 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3810 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3811 QueryMap *localQueryToStateMap) {
3812 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3813 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003814 if (!disabled[command_buffer_state]) {
3815 auto pool_state = GetQueryPoolState(query_obj.pool);
3816 cb_state->AddChild(pool_state);
3817 }
locke-lunargd556cc32019-09-17 01:21:23 -06003818}
3819
3820void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003821 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003822 QueryObject query_obj = {queryPool, slot};
3823 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3824 RecordCmdEndQuery(cb_state, query_obj);
3825}
3826
3827void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3828 uint32_t firstQuery, uint32_t queryCount) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003829 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003830 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3831
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02003832 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
3833 QueryObject query = {queryPool, slot};
3834 cb_state->resetQueries.insert(query);
3835 }
3836
Jeff Bolz310775c2019-10-09 00:46:33 -05003837 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003838 bool do_validate, VkQueryPool &firstPerfQueryPool,
3839 uint32_t perfQueryPass,
3840 QueryMap *localQueryToStateMap) {
3841 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06003842 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003843 if (!disabled[command_buffer_state]) {
3844 auto pool_state = GetQueryPoolState(queryPool);
3845 cb_state->AddChild(pool_state);
3846 }
locke-lunargd556cc32019-09-17 01:21:23 -06003847}
3848
3849void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3850 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3851 VkDeviceSize dstOffset, VkDeviceSize stride,
3852 VkQueryResultFlags flags) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003853 if (disabled[query_validation] || disabled[command_buffer_state]) return;
3854
locke-lunargd556cc32019-09-17 01:21:23 -06003855 auto cb_state = GetCBState(commandBuffer);
3856 auto dst_buff_state = GetBufferState(dstBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003857 cb_state->AddChild(dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003858 auto pool_state = GetQueryPoolState(queryPool);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003859 cb_state->AddChild(pool_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003860}
3861
// vkCmdWriteTimestamp (synchronization1): identical state recording to the sync2
// entry point; the single stage bit widens implicitly to VkPipelineStageFlags2KHR.
void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    PostCallRecordCmdWriteTimestamp2KHR(commandBuffer, pipelineStage, queryPool, slot);
}
3866
3867void ValidationStateTracker::PostCallRecordCmdWriteTimestamp2KHR(VkCommandBuffer commandBuffer,
3868 VkPipelineStageFlags2KHR pipelineStage, VkQueryPool queryPool,
3869 uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003870 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003871 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003872 if (!disabled[command_buffer_state]) {
3873 auto pool_state = GetQueryPoolState(queryPool);
3874 cb_state->AddChild(pool_state);
3875 }
locke-lunargd556cc32019-09-17 01:21:23 -06003876 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003877 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
3878 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3879 QueryMap *localQueryToStateMap) {
3880 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3881 });
locke-lunargd556cc32019-09-17 01:21:23 -06003882}
3883
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003884void ValidationStateTracker::PostCallRecordCmdWriteAccelerationStructuresPropertiesKHR(
3885 VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR *pAccelerationStructures,
3886 VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {
3887 if (disabled[query_validation]) return;
3888 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003889 if (!disabled[command_buffer_state]) {
3890 auto pool_state = GetQueryPoolState(queryPool);
3891 cb_state->AddChild(pool_state);
3892 }
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003893 cb_state->queryUpdates.emplace_back(
3894 [queryPool, firstQuery, accelerationStructureCount](const ValidationStateTracker *device_data, bool do_validate,
3895 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3896 QueryMap *localQueryToStateMap) {
3897 return SetQueryStateMulti(queryPool, firstQuery, accelerationStructureCount, perfQueryPass, QUERYSTATE_ENDED,
3898 localQueryToStateMap);
3899 });
3900}
3901
locke-lunargd556cc32019-09-17 01:21:23 -06003902void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3903 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3904 VkResult result) {
3905 if (VK_SUCCESS != result) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003906
Jeremy Gebben88f58142021-06-01 10:07:52 -06003907 std::vector<std::shared_ptr<IMAGE_VIEW_STATE>> views;
Mike Schuchardt2df08912020-12-15 16:28:09 -08003908 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) == 0) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003909 views.resize(pCreateInfo->attachmentCount);
locke-lunarg1ae57d62020-11-18 10:49:19 -07003910
locke-lunargd556cc32019-09-17 01:21:23 -06003911 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003912 views[i] = GetShared<IMAGE_VIEW_STATE>(pCreateInfo->pAttachments[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003913 }
3914 }
Jeremy Gebben88f58142021-06-01 10:07:52 -06003915
3916 frameBufferMap[*pFramebuffer] = std::make_shared<FRAMEBUFFER_STATE>(
3917 *pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass), std::move(views));
locke-lunargd556cc32019-09-17 01:21:23 -06003918}
3919
// vkCreateRenderPass (Vulkan 1.0 create-info): on success, create and store the
// render pass state object.
void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;  // nothing to track if creation failed
    renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
}
3926
// vkCreateRenderPass2KHR: same state creation as the core CreateRenderPass2 path,
// using the VkRenderPassCreateInfo2 flavor.
void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;

    renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
}
3934
// vkCreateRenderPass2 (core): on success, create and store the render pass state.
void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;

    renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
}
3942
// Shared recorder for all vkCmdBeginRenderPass* entry points. Captures the active
// render pass / framebuffer / subpass on the command buffer and (unless state
// tracking is disabled) wires up parent/child links and per-subpass attachment info.
void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        // Render-pass-scoped state: begin at subpass 0 with the given contents.
        cb_state->activeFramebuffer = framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;

        // Connect this RP to cmdBuffer
        if (!disabled[command_buffer_state]) {
            cb_state->AddChild(render_pass_state.get());
        }

        // Device-group begin info overrides the device mask for this render pass.
        auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }

        // Drop any attachment info left over from a previous render pass.
        cb_state->active_subpasses = nullptr;
        cb_state->active_attachments = nullptr;

        if (framebuffer) {
            cb_state->framebuffers.insert(framebuffer);

            // Set cb_state->active_subpasses
            cb_state->active_subpasses =
                std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
            const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
            UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);

            // Set cb_state->active_attachments & cb_state->attachments_view_states
            cb_state->active_attachments =
                std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(framebuffer->createInfo.attachmentCount);
            UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, pRenderPassBegin);

            // Connect this framebuffer and its children to this cmdBuffer
            cb_state->AddChild(framebuffer.get());
        }
    }
}
3991
// vkCmdBeginRenderPass: shared recording for all begin-render-pass variants.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}
3997
// vkCmdBeginRenderPass2KHR: the subpass contents come from the begin-info struct.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfo *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}
4003
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06004004void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4005 uint32_t counterBufferCount,
4006 const VkBuffer *pCounterBuffers,
4007 const VkDeviceSize *pCounterBufferOffsets) {
4008 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4009
4010 cb_state->transform_feedback_active = true;
4011}
4012
4013void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4014 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
4015 const VkDeviceSize *pCounterBufferOffsets) {
4016 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4017
4018 cb_state->transform_feedback_active = false;
4019}
4020
// vkCmdBeginRenderPass2 (core): same recording as the KHR variant.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfo *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}
4026
locke-lunargd556cc32019-09-17 01:21:23 -06004027void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4028 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4029 cb_state->activeSubpass++;
4030 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07004031
4032 // Update cb_state->active_subpasses
4033 if (cb_state->activeRenderPass && cb_state->activeFramebuffer) {
4034 cb_state->active_subpasses = nullptr;
4035 cb_state->active_subpasses =
4036 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
4037
4038 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
4039 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
4040 }
locke-lunargd556cc32019-09-17 01:21:23 -06004041}
4042
// vkCmdNextSubpass: shared recording for all next-subpass variants.
void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}
4046
// vkCmdNextSubpass2KHR: the subpass contents come from the begin-info struct.
void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfo *pSubpassBeginInfo,
                                                              const VkSubpassEndInfo *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}
4052
// vkCmdNextSubpass2 (core): same recording as the KHR variant.
void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfo *pSubpassBeginInfo,
                                                           const VkSubpassEndInfo *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}
4058
// Shared recorder for vkCmdEndRenderPass*: clear all render-pass-scoped state
// from the command buffer now that the pass has ended.
void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->active_attachments = nullptr;
    cb_state->active_subpasses = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
}
4067
// vkCmdEndRenderPass: shared recording for all end-render-pass variants.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}
4071
// vkCmdEndRenderPass2KHR: the end-info struct adds nothing the tracker records.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfo *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}
4076
// vkCmdEndRenderPass2 (core): same recording as the KHR variant.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                             const VkSubpassEndInfo *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}
// Pre-record hook for vkCmdExecuteCommands: folds each secondary command buffer's
// tracked state (image layouts, query/queue-submit callbacks, linkage) into the
// primary, and marks dynamic viewport/scissor state as trashed.
void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        // Executing a non-simultaneous-use secondary revokes simultaneous use from the primary.
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
                // from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate inital layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = cb_state->GetImageSubresourceLayoutMap(*image_state);
            const auto *sub_cb_subres_map = &sub_layout_map_entry.second;
            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        // Link the secondary to the primary so later invalidation/child tracking works.
        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer();
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        cb_state->AddChild(sub_cb_state);
        // Deferred query and queue-submit-time callbacks recorded in the secondary
        // must run as part of the primary's submission.
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }

        // State is trashed after executing secondary command buffers.
        // Importantly, this function runs after CoreChecks::PreCallValidateCmdExecuteCommands.
        cb_state->trashedViewportMask = ~uint32_t(0);
        cb_state->trashedScissorMask = ~uint32_t(0);
        cb_state->trashedViewportCount = true;
        cb_state->trashedScissorCount = true;
    }
}
4130
// Post-record hook for vkMapMemory: on success, records the mapped range and
// the driver-returned host pointer on the memory object.
void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}
4136
4137void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4138 auto mem_info = GetDevMemState(mem);
4139 if (mem_info) {
4140 mem_info->mapped_range = MemRange();
4141 mem_info->p_driver_data = nullptr;
4142 }
4143}
4144
// Common recorder for vkBindImageMemory / vkBindImageMemory2*: builds the image's
// subresource range encoder, then either ties the image to a swapchain slot (with a
// fake address shared by all aliases of that slot) or to a device memory allocation,
// recording aliasing relationships in both cases.
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // An Android sepcial image cannot get VkSubresourceLayout until the image binds a memory.
        // See: VUID-vkGetImageSubresourceLayout-image-01895
        image_state->fragment_encoder =
            std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
        const auto swapchain_info = LvlFindInChain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            // Binding to a swapchain image rather than to device memory.
            auto *swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                SWAPCHAIN_IMAGE &swap_image = swapchain->images[swapchain_info->imageIndex];
                if (swap_image.bound_images.empty()) {
                    // If this is the first "binding" of an image to this swapchain index, get a fake allocation
                    image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
                } else {
                    // Reuse the fake address already assigned to this swapchain slot.
                    image_state->swapchain_fake_address = (*swap_image.bound_images.cbegin())->swapchain_fake_address;
                }
                swap_image.bound_images.emplace(image_state);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;

                // All images bound to this swapchain and index are aliases
                for (auto *other_image : swap_image.bound_images) {
                    image_state->AddAliasingImage(other_image);
                }
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemShared(bindInfo.memory);
            if (mem_info) {
                // ALIAS_BIT images sharing this allocation alias each other.
                if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
                    for (auto *base_node : mem_info->ObjectBindings()) {
                        if (base_node->Handle().type == kVulkanObjectTypeImage) {
                            auto other_image = static_cast<IMAGE_STATE *>(base_node);
                            image_state->AddAliasingImage(other_image);
                        }
                    }
                }
                // Track objects tied to memory
                image_state->SetMemBinding(mem_info, bindInfo.memoryOffset);
            }
        }
    }
}
4190
4191void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
4192 VkDeviceSize memoryOffset, VkResult result) {
4193 if (VK_SUCCESS != result) return;
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004194 auto bind_info = LvlInitStruct<VkBindImageMemoryInfo>();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004195 bind_info.image = image;
4196 bind_info.memory = mem;
4197 bind_info.memoryOffset = memoryOffset;
4198 UpdateBindImageMemoryState(bind_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004199}
4200
4201void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004202 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004203 if (VK_SUCCESS != result) return;
4204 for (uint32_t i = 0; i < bindInfoCount; i++) {
4205 UpdateBindImageMemoryState(pBindInfos[i]);
4206 }
4207}
4208
4209void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004210 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004211 if (VK_SUCCESS != result) return;
4212 for (uint32_t i = 0; i < bindInfoCount; i++) {
4213 UpdateBindImageMemoryState(pBindInfos[i]);
4214 }
4215}
4216
4217void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
4218 auto event_state = GetEventState(event);
4219 if (event_state) {
4220 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
4221 }
locke-lunargd556cc32019-09-17 01:21:23 -06004222}
4223
// Post-record hook for vkImportSemaphoreFdKHR: on success, update the semaphore's
// scope based on the imported handle type and import flags.
void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}
4231
4232void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004233 VkExternalSemaphoreHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004234 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004235 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004236 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
4237 semaphore_state->scope = kSyncScopeExternalPermanent;
4238 }
4239}
4240
4241#ifdef VK_USE_PLATFORM_WIN32_KHR
// Post-record hook for vkImportSemaphoreWin32HandleKHR: on success, update the
// semaphore's scope based on the imported handle type and import flags.
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}
4248
// Post-record hook for vkGetSemaphoreWin32HandleKHR: on success, record the
// export's effect on the semaphore's tracking scope.
void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}
4255
// Post-record hook for vkImportFenceWin32HandleKHR: on success, update the
// fence's scope based on the imported handle type and import flags.
void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}
4262
// Post-record hook for vkGetFenceWin32HandleKHR: on success, record the export's
// effect on the fence's tracking scope / signaled state.
void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
4269#endif
4270
// Post-record hook for vkGetSemaphoreFdKHR: on success, record the export's
// effect on the semaphore's tracking scope.
void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}
4276
Mike Schuchardt2df08912020-12-15 16:28:09 -08004277void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type,
4278 VkFenceImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06004279 FENCE_STATE *fence_node = GetFenceState(fence);
4280 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004281 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_FENCE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06004282 fence_node->scope == kSyncScopeInternal) {
4283 fence_node->scope = kSyncScopeExternalTemporary;
4284 } else {
4285 fence_node->scope = kSyncScopeExternalPermanent;
4286 }
4287 }
4288}
4289
// Post-record hook for vkImportFenceFdKHR: on success, update the fence's scope
// based on the imported handle type and import flags.
void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}
4295
Mike Schuchardt2df08912020-12-15 16:28:09 -08004296void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004297 FENCE_STATE *fence_state = GetFenceState(fence);
4298 if (fence_state) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004299 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004300 // Export with reference transference becomes external
4301 fence_state->scope = kSyncScopeExternalPermanent;
4302 } else if (fence_state->scope == kSyncScopeInternal) {
4303 // Export with copy transference has a side effect of resetting the fence
4304 fence_state->state = FENCE_UNSIGNALED;
4305 }
4306 }
4307}
4308
// Post-record hook for vkGetFenceFdKHR: on success, record the export's effect
// on the fence's tracking scope / signaled state.
void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}
4314
4315void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
4316 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
4317 if (VK_SUCCESS != result) return;
John Zulaufd5115702021-01-18 12:34:33 -07004318 const auto event = *pEvent;
Jeremy Gebbencbf22862021-03-03 12:01:22 -07004319 eventMap.emplace(event, std::make_shared<EVENT_STATE>(event, pCreateInfo->flags));
locke-lunargd556cc32019-09-17 01:21:23 -06004320}
4321
4322void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
4323 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
4324 SWAPCHAIN_NODE *old_swapchain_state) {
4325 if (VK_SUCCESS == result) {
Nathaniel Cesario39152e62021-07-02 13:04:16 -06004326 auto swapchain_state = CreateSwapchainState(pCreateInfo, *pSwapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004327 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
4328 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
4329 swapchain_state->shared_presentable = true;
4330 }
4331 surface_state->swapchain = swapchain_state.get();
4332 swapchainMap[*pSwapchain] = std::move(swapchain_state);
4333 } else {
4334 surface_state->swapchain = nullptr;
4335 }
4336 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
4337 if (old_swapchain_state) {
4338 old_swapchain_state->retired = true;
4339 }
4340 return;
4341}
4342
4343void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
4344 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
4345 VkResult result) {
4346 auto surface_state = GetSurfaceState(pCreateInfo->surface);
4347 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
4348 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
4349}
4350
// Pre-record hook for vkDestroySwapchainKHR: tears down all per-image state
// (aliases first, then the image states themselves), unlinks the surface, and
// finally destroys and erases the swapchain state.
void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (auto &swapchain_image : swapchain_data->images) {
            // TODO: missing validation that the bound images are empty (except for image_state above)
            // Clean up the aliases and the bound_images *before* erasing the image_state.
            RemoveAliasingImages(swapchain_image.bound_images);
            swapchain_image.bound_images.clear();

            if (swapchain_image.image_state) {
                swapchain_image.image_state->Destroy();
                imageMap.erase(swapchain_image.image_state->image());
                swapchain_image.image_state = nullptr;
            }
        }

        // Detach the surface from this swapchain if it still points at it.
        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->Destroy();
        swapchainMap.erase(swapchain);
    }
}
4377
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004378void ValidationStateTracker::PostCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
4379 const VkDisplayModeCreateInfoKHR *pCreateInfo,
4380 const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode,
4381 VkResult result) {
4382 if (VK_SUCCESS != result) return;
4383 if (!pMode) return;
Jeremy Gebben5573a8c2021-06-04 08:55:10 -06004384 display_mode_map[*pMode] = std::make_shared<DISPLAY_MODE_STATE>(*pMode, physicalDevice);
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004385}
4386
// Post-record hook for vkQueuePresentKHR: retires the wait semaphores and marks
// each successfully presented image as released back to the WSI.
void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto semaphore_state = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (semaphore_state) {
            semaphore_state->signaler.first = VK_NULL_HANDLE;
            semaphore_state->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
        // confused itself just as much.
        // Per-swapchain results (if provided) override the aggregate result.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            IMAGE_STATE *image_state = swapchain_data->images[pPresentInfo->pImageIndices[i]].image_state;
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    // Shared-presentable images stay in their (locked) layout after present.
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
    // its semaphore waits) /never/ participate in any completion proof.
}
4417
4418void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
4419 const VkSwapchainCreateInfoKHR *pCreateInfos,
4420 const VkAllocationCallbacks *pAllocator,
4421 VkSwapchainKHR *pSwapchains, VkResult result) {
4422 if (pCreateInfos) {
4423 for (uint32_t i = 0; i < swapchainCount; i++) {
4424 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
4425 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
4426 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
4427 }
4428 }
4429}
4430
4431void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4432 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004433 auto fence_state = GetFenceState(fence);
4434 if (fence_state && fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004435 // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
4436 // import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004437 fence_state->state = FENCE_INFLIGHT;
4438 fence_state->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
locke-lunargd556cc32019-09-17 01:21:23 -06004439 }
4440
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004441 auto semaphore_state = GetSemaphoreState(semaphore);
4442 if (semaphore_state && semaphore_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004443 // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
4444 // temporary import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004445 semaphore_state->signaled = true;
4446 semaphore_state->signaler.first = VK_NULL_HANDLE;
locke-lunargd556cc32019-09-17 01:21:23 -06004447 }
4448
4449 // Mark the image as acquired.
4450 auto swapchain_data = GetSwapchainState(swapchain);
4451 if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
John Zulauffaa7a522021-03-05 12:22:45 -07004452 IMAGE_STATE *image_state = swapchain_data->images[*pImageIndex].image_state;
locke-lunargd556cc32019-09-17 01:21:23 -06004453 if (image_state) {
4454 image_state->acquired = true;
4455 image_state->shared_presentable = swapchain_data->shared_presentable;
4456 }
4457 }
4458}
4459
// Post-record hook for vkAcquireNextImageKHR: both VK_SUCCESS and
// VK_SUBOPTIMAL_KHR deliver a valid image index, so record in either case.
void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}
4466
// Post-record hook for vkAcquireNextImage2KHR: unpack the info struct and
// delegate to the common acquire recorder (SUBOPTIMAL still yields an image).
void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}
4473
4474void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
4475 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
4476 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
4477 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
4478 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
4479 phys_device_state.phys_device = pPhysicalDevices[i];
4480 // Init actual features for each physical device
4481 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
4482 }
4483 }
4484}
4485
4486// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
4487static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004488 VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004489 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
4490
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004491 if (pQueueFamilyProperties) { // Save queue family properties
locke-lunargd556cc32019-09-17 01:21:23 -06004492 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
4493 for (uint32_t i = 0; i < count; ++i) {
4494 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
4495 }
4496 }
4497}
4498
4499void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
4500 uint32_t *pQueueFamilyPropertyCount,
4501 VkQueueFamilyProperties *pQueueFamilyProperties) {
4502 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4503 assert(physical_device_state);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004504 VkQueueFamilyProperties2 *pqfp = nullptr;
4505 std::vector<VkQueueFamilyProperties2> qfp;
locke-lunargd556cc32019-09-17 01:21:23 -06004506 qfp.resize(*pQueueFamilyPropertyCount);
4507 if (pQueueFamilyProperties) {
4508 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004509 qfp[i] = LvlInitStruct<VkQueueFamilyProperties2>();
locke-lunargd556cc32019-09-17 01:21:23 -06004510 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
4511 }
4512 pqfp = qfp.data();
4513 }
4514 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
4515}
4516
// Post-record hook for vkGetPhysicalDeviceQueueFamilyProperties2: forwards
// directly to the common update path (already in "2" form).
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}
4524
// Post-record hook for vkGetPhysicalDeviceQueueFamilyProperties2KHR (extension
// alias of the core "2" entry point).
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}
4532void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
4533 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004534 if (!surface) return;
4535 auto surface_state = GetSurfaceState(surface);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004536 surface_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06004537 surface_map.erase(surface);
4538}
4539
// Common recorder for every platform's vkCreate*Surface* success path: begin
// tracking the newly created surface handle.
void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}
4543
// Post-record hook for vkCreateDisplayPlaneSurfaceKHR: on success, start tracking the new surface.
void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
4551
4552#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Post-record hook for vkCreateAndroidSurfaceKHR: on success, start tracking the new surface.
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
4560#endif // VK_USE_PLATFORM_ANDROID_KHR
4561
4562#ifdef VK_USE_PLATFORM_IOS_MVK
// Post-record hook for vkCreateIOSSurfaceMVK: on success, start tracking the new surface.
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
4569#endif // VK_USE_PLATFORM_IOS_MVK
4570
4571#ifdef VK_USE_PLATFORM_MACOS_MVK
// Post-record hook for vkCreateMacOSSurfaceMVK: on success, start tracking the new surface.
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
4579#endif // VK_USE_PLATFORM_MACOS_MVK
4580
Jeremy Kniagerf33a67c2019-12-09 09:44:39 -07004581#ifdef VK_USE_PLATFORM_METAL_EXT
// Post-record hook for vkCreateMetalSurfaceEXT: on success, start tracking the new surface.
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
4589#endif // VK_USE_PLATFORM_METAL_EXT
4590
locke-lunargd556cc32019-09-17 01:21:23 -06004591#ifdef VK_USE_PLATFORM_WAYLAND_KHR
// Post-record hook for vkCreateWaylandSurfaceKHR: on success, start tracking the new surface.
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
4599#endif // VK_USE_PLATFORM_WAYLAND_KHR
4600
4601#ifdef VK_USE_PLATFORM_WIN32_KHR
// Post-record hook for vkCreateWin32SurfaceKHR: on success, start tracking the new surface.
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
4609#endif // VK_USE_PLATFORM_WIN32_KHR
4610
4611#ifdef VK_USE_PLATFORM_XCB_KHR
// Post-record hook for vkCreateXcbSurfaceKHR: on success, start tracking the new surface.
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
4618#endif // VK_USE_PLATFORM_XCB_KHR
4619
4620#ifdef VK_USE_PLATFORM_XLIB_KHR
// Post-record hook for vkCreateXlibSurfaceKHR: on success, start tracking the new surface.
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
4627#endif // VK_USE_PLATFORM_XLIB_KHR
4628
// Post-record hook for vkCreateHeadlessSurfaceEXT: on success, start tracking the new surface.
void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
                                                                    const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
                                                                    const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                    VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
4636
// Post-record hook for vkGetPhysicalDeviceFeatures: caches the 1.0 core features
// inside the features2 wrapper (clearing any stale extension feature structs first).
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    // Reset the features2 safe struct before setting up the features field.
    physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
    physical_device_state->features2.features = *pFeatures;
}
4644
// Post-record hook for vkGetPhysicalDeviceFeatures2: caches the full features2
// chain (including any pNext extension feature structs).
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->features2.initialize(pFeatures);
}
4650
// Post-record hook for vkGetPhysicalDeviceFeatures2KHR (extension alias of Features2).
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->features2.initialize(pFeatures);
}
4656
locke-lunargd556cc32019-09-17 01:21:23 -06004657void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
4658 VkSurfaceKHR surface,
4659 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
4660 VkResult result) {
4661 if (VK_SUCCESS != result) return;
4662 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004663 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004664
4665 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4666 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004667}
4668
4669void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
4670 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4671 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
4672 if (VK_SUCCESS != result) return;
4673 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004674 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004675
4676 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4677 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004678}
4679
4680void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
4681 VkSurfaceKHR surface,
4682 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
4683 VkResult result) {
4684 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004685 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
4686 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
4687 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
4688 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
4689 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
4690 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
4691 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
4692 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
4693 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
4694 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004695
4696 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4697 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004698}
4699
4700void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
4701 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
4702 VkBool32 *pSupported, VkResult result) {
4703 if (VK_SUCCESS != result) return;
4704 auto surface_state = GetSurfaceState(surface);
4705 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
4706}
4707
4708void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
4709 VkSurfaceKHR surface,
4710 uint32_t *pPresentModeCount,
4711 VkPresentModeKHR *pPresentModes,
4712 VkResult result) {
4713 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4714
4715 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
4716 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004717
4718 if (*pPresentModeCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004719 if (*pPresentModeCount > physical_device_state->present_modes.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004720 physical_device_state->present_modes.resize(*pPresentModeCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004721 }
locke-lunargd556cc32019-09-17 01:21:23 -06004722 }
4723 if (pPresentModes) {
locke-lunargd556cc32019-09-17 01:21:23 -06004724 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
4725 physical_device_state->present_modes[i] = pPresentModes[i];
4726 }
4727 }
4728}
4729
4730void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
4731 uint32_t *pSurfaceFormatCount,
4732 VkSurfaceFormatKHR *pSurfaceFormats,
4733 VkResult result) {
4734 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4735
4736 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004737
4738 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004739 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004740 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004741 }
locke-lunargd556cc32019-09-17 01:21:23 -06004742 }
4743 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004744 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
4745 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
4746 }
4747 }
4748}
4749
4750void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
4751 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4752 uint32_t *pSurfaceFormatCount,
4753 VkSurfaceFormat2KHR *pSurfaceFormats,
4754 VkResult result) {
4755 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4756
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004757 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004758 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004759 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
4760 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
4761 }
locke-lunargd556cc32019-09-17 01:21:23 -06004762 }
4763 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004764 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004765 physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
locke-lunargd556cc32019-09-17 01:21:23 -06004766 }
4767 }
4768}
4769
// Forward the start of a debug-utils label region to the layer's logging machinery.
void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}
4774
// Forward the end of a debug-utils label region to the layer's logging machinery.
void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}
4778
4779void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4780 const VkDebugUtilsLabelEXT *pLabelInfo) {
4781 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4782
4783 // Squirrel away an easily accessible copy.
4784 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4785 cb_state->debug_label = LoggingLabel(pLabelInfo);
4786}
4787
4788void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004789 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004790 if (NULL != pPhysicalDeviceGroupProperties) {
4791 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
4792 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
4793 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
4794 auto &phys_device_state = physical_device_map[cur_phys_dev];
4795 phys_device_state.phys_device = cur_phys_dev;
4796 // Init actual features for each physical device
4797 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
4798 }
4799 }
4800 }
4801}
4802
4803void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004804 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004805 VkResult result) {
4806 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4807 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4808}
4809
4810void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004811 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004812 VkResult result) {
4813 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4814 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4815}
4816
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004817void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
4818 uint32_t queueFamilyIndex,
4819 uint32_t *pCounterCount,
4820 VkPerformanceCounterKHR *pCounters) {
4821 if (NULL == pCounters) return;
4822
4823 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4824 assert(physical_device_state);
4825
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004826 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queue_family_counters(new QUEUE_FAMILY_PERF_COUNTERS());
4827 queue_family_counters->counters.resize(*pCounterCount);
4828 for (uint32_t i = 0; i < *pCounterCount; i++) queue_family_counters->counters[i] = pCounters[i];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004829
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004830 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queue_family_counters);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004831}
4832
4833void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
4834 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
4835 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
4836 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4837 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
4838}
4839
4840void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
4841 VkResult result) {
4842 if (result == VK_SUCCESS) performance_lock_acquired = true;
4843}
4844
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004845void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
4846 performance_lock_acquired = false;
4847 for (auto &cmd_buffer : commandBufferMap) {
4848 cmd_buffer.second->performance_lock_released = true;
4849 }
4850}
4851
locke-lunargd556cc32019-09-17 01:21:23 -06004852void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004853 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004854 const VkAllocationCallbacks *pAllocator) {
4855 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004856 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4857 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004858 desc_template_map.erase(descriptorUpdateTemplate);
4859}
4860
4861void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004862 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004863 const VkAllocationCallbacks *pAllocator) {
4864 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004865 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4866 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004867 desc_template_map.erase(descriptorUpdateTemplate);
4868}
4869
Mike Schuchardt2df08912020-12-15 16:28:09 -08004870void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4871 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) {
locke-lunargd556cc32019-09-17 01:21:23 -06004872 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004873 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004874 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
4875}
4876
Mike Schuchardt2df08912020-12-15 16:28:09 -08004877void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device,
4878 const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4879 const VkAllocationCallbacks *pAllocator,
4880 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate,
4881 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004882 if (VK_SUCCESS != result) return;
4883 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4884}
4885
4886void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004887 VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
4888 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004889 if (VK_SUCCESS != result) return;
4890 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4891}
4892
4893void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004894 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004895 const void *pData) {
4896 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
4897 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
4898 assert(0);
4899 } else {
4900 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
4901 // TODO: Record template push descriptor updates
4902 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
4903 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
4904 }
4905 }
4906}
4907
// Core entry point: delegate to the shared templated-update recorder.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
4913
// KHR alias: delegate to the shared templated-update recorder.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
4919
Mike Schuchardt2df08912020-12-15 16:28:09 -08004920void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
4921 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4922 VkPipelineLayout layout, uint32_t set,
4923 const void *pData) {
locke-lunargd556cc32019-09-17 01:21:23 -06004924 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4925
4926 const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4927 if (template_state) {
4928 auto layout_data = GetPipelineLayout(layout);
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06004929 auto dsl = layout_data ? layout_data->GetDsl(set) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004930 const auto &template_ci = template_state->create_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004931 if (dsl && !dsl->Destroyed()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004932 // Decode the template into a set of write updates
4933 cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
4934 dsl->GetDescriptorSetLayout());
4935 RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
4936 static_cast<uint32_t>(decoded_template.desc_writes.size()),
4937 decoded_template.desc_writes.data());
4938 }
4939 }
4940}
4941
4942void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
4943 uint32_t *pPropertyCount, void *pProperties) {
4944 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4945 if (*pPropertyCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06004946 physical_device_state->display_plane_property_count = *pPropertyCount;
4947 }
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004948 if (*pPropertyCount || pProperties) {
4949 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004950 }
4951}
4952
4953void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
4954 uint32_t *pPropertyCount,
4955 VkDisplayPlanePropertiesKHR *pProperties,
4956 VkResult result) {
4957 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4958 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
4959}
4960
4961void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
4962 uint32_t *pPropertyCount,
4963 VkDisplayPlaneProperties2KHR *pProperties,
4964 VkResult result) {
4965 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4966 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
4967}
4968
4969void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4970 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
4971 QueryObject query_obj = {queryPool, query, index};
4972 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4973 RecordCmdBeginQuery(cb_state, query_obj);
4974}
4975
4976void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4977 uint32_t query, uint32_t index) {
4978 QueryObject query_obj = {queryPool, query, index};
4979 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4980 RecordCmdEndQuery(cb_state, query_obj);
4981}
4982
4983void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
4984 VkSamplerYcbcrConversion ycbcr_conversion) {
Jeremy Gebben5d970742021-05-31 16:04:14 -06004985 auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>(ycbcr_conversion, create_info,
4986 GetPotentialFormatFeatures(create_info->format));
4987 // If format is VK_FORMAT_UNDEFINED, format_features will be set by external AHB features
locke-lunargd556cc32019-09-17 01:21:23 -06004988 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004989 RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06004990 }
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004991 samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004992}
4993
4994void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
4995 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
4996 const VkAllocationCallbacks *pAllocator,
4997 VkSamplerYcbcrConversion *pYcbcrConversion,
4998 VkResult result) {
4999 if (VK_SUCCESS != result) return;
5000 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5001}
5002
5003void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
5004 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5005 const VkAllocationCallbacks *pAllocator,
5006 VkSamplerYcbcrConversion *pYcbcrConversion,
5007 VkResult result) {
5008 if (VK_SUCCESS != result) return;
5009 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5010}
5011
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005012void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
5013 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
5014 RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
5015 }
5016
5017 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005018 ycbcr_state->Destroy();
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005019 samplerYcbcrConversionMap.erase(ycbcr_conversion);
5020}
5021
locke-lunargd556cc32019-09-17 01:21:23 -06005022void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
5023 const VkAllocationCallbacks *pAllocator) {
5024 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005025 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005026}
5027
5028void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
5029 VkSamplerYcbcrConversion ycbcrConversion,
5030 const VkAllocationCallbacks *pAllocator) {
5031 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005032 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005033}
5034
// Shared recorder for vkResetQueryPool / vkResetQueryPoolEXT (host query reset):
// mark the affected queries (and, for performance queries, each pass slot) as reset.
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    // Clamp so queries past the end of the pool are not touched.
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            // Performance queries keep one state entry per pass; reset each of them.
            // NOTE: query_obj.perf_pass deliberately persists across outer-loop
            // iterations; the inner loop overwrites it on every pass anyway.
            for (uint32_t pass_index = 0; pass_index < query_pool_state->n_performance_passes; pass_index++) {
                query_obj.perf_pass = pass_index;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}
5058
// EXT alias: delegate to the shared host-query-reset recorder.
void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5063
// Core 1.2 entry point: delegate to the shared host-query-reset recorder.
void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5068
locke-lunargd556cc32019-09-17 01:21:23 -06005069void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
5070 const TEMPLATE_STATE *template_state, const void *pData) {
5071 // Translate the templated update into a normal update for validation...
5072 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
5073 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
5074 decoded_update.desc_writes.data(), 0, NULL);
5075}
5076
5077// Update the common AllocateDescriptorSetsData
5078void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005079 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005080 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005081 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005082 if (layout) {
5083 ds_data->layout_nodes[i] = layout;
5084 // Count total descriptors required per type
5085 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5086 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005087 uint32_t type_index = static_cast<uint32_t>(binding_layout->descriptorType);
5088 ds_data->required_descriptors_by_type[type_index] += binding_layout->descriptorCount;
locke-lunargd556cc32019-09-17 01:21:23 -06005089 }
5090 }
5091 // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
5092 }
5093}
5094
5095// Decrement allocated sets from the pool and insert new sets into set_map
5096void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
5097 const VkDescriptorSet *descriptor_sets,
5098 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
5099 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
5100 // Account for sets and individual descriptors allocated from pool
5101 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
5102 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
5103 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
5104 }
5105
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07005106 const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(p_alloc_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06005107 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
5108
5109 // Create tracking object for each descriptor set; insert into global map and the pool's set.
5110 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
5111 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
5112
Jeff Bolz41a1ced2019-10-11 11:40:49 -05005113 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07005114 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06005115 pool_state->sets.insert(new_ds.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005116 setMap[descriptor_sets[i]] = std::move(new_ds);
5117 }
5118}
5119
// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
                                                            VkPipelineBindPoint bind_point, const char *function) {
    UpdateDrawState(cb_state, cmd_type, bind_point, function);
    // Set for both draws and dispatches (UpdateStateCmdDrawType funnels through here too).
    cb_state->hasDispatchCmd = true;
}
5126
locke-lunargd556cc32019-09-17 01:21:23 -06005127// Generic function to handle state update for all CmdDraw* type functions
locke-lunarg540b2252020-08-03 13:23:36 -06005128void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
5129 const char *function) {
5130 UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005131 cb_state->hasDrawCmd = true;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005132
5133 // Update the consumed viewport/scissor count.
5134 uint32_t& used = cb_state->usedViewportScissorCount;
5135 used = std::max(used, cb_state->pipelineStaticViewportCount);
5136 used = std::max(used, cb_state->pipelineStaticScissorCount);
5137 cb_state->usedDynamicViewportCount |= !!(cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET); // !! silences MSVC warn
5138 cb_state->usedDynamicScissorCount |= !!(cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET);
locke-lunargd556cc32019-09-17 01:21:23 -06005139}
5140
5141void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5142 uint32_t firstVertex, uint32_t firstInstance) {
5143 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005144 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
locke-lunargd556cc32019-09-17 01:21:23 -06005145}
5146
5147void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5148 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5149 uint32_t firstInstance) {
5150 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005151 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
locke-lunargd556cc32019-09-17 01:21:23 -06005152}
5153
5154void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5155 uint32_t count, uint32_t stride) {
5156 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5157 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005158 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005159 if (!disabled[command_buffer_state]) {
5160 cb_state->AddChild(buffer_state);
5161 }
locke-lunargd556cc32019-09-17 01:21:23 -06005162}
5163
5164void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5165 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5166 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5167 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005168 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005169 if (!disabled[command_buffer_state]) {
5170 cb_state->AddChild(buffer_state);
5171 }
locke-lunargd556cc32019-09-17 01:21:23 -06005172}
5173
5174void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5175 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005176 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
locke-lunargd556cc32019-09-17 01:21:23 -06005177}
5178
5179void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5180 VkDeviceSize offset) {
5181 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005182 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005183 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005184 if (!disabled[command_buffer_state]) {
5185 cb_state->AddChild(buffer_state);
5186 }
locke-lunargd556cc32019-09-17 01:21:23 -06005187}
5188
Tony-LunarG977448c2019-12-02 14:52:02 -07005189void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5190 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
locke-lunarg540b2252020-08-03 13:23:36 -06005191 uint32_t stride, const char *function) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005192 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5193 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5194 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005195 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005196 if (!disabled[command_buffer_state]) {
5197 cb_state->AddChild(buffer_state);
5198 cb_state->AddChild(count_buffer_state);
5199 }
Tony-LunarG977448c2019-12-02 14:52:02 -07005200}
5201
locke-lunargd556cc32019-09-17 01:21:23 -06005202void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5203 VkDeviceSize offset, VkBuffer countBuffer,
5204 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5205 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005206 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5207 "vkCmdDrawIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005208}
5209
5210void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5211 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5212 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005213 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5214 "vkCmdDrawIndirectCount()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005215}
5216
5217void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5218 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
locke-lunarg540b2252020-08-03 13:23:36 -06005219 uint32_t maxDrawCount, uint32_t stride, const char *function) {
locke-lunargd556cc32019-09-17 01:21:23 -06005220 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5221 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5222 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005223 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005224 if (!disabled[command_buffer_state]) {
5225 cb_state->AddChild(buffer_state);
5226 cb_state->AddChild(count_buffer_state);
5227 }
locke-lunargd556cc32019-09-17 01:21:23 -06005228}
5229
5230void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5231 VkDeviceSize offset, VkBuffer countBuffer,
5232 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5233 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005234 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5235 "vkCmdDrawIndexedIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005236}
5237
5238void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
5239 VkDeviceSize offset, VkBuffer countBuffer,
5240 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5241 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005242 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5243 "vkCmdDrawIndexedIndirectCount()");
locke-lunargd556cc32019-09-17 01:21:23 -06005244}
5245
5246void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5247 uint32_t firstTask) {
5248 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005249 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005250}
5251
5252void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5253 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5254 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005255 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5256 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005257 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005258 if (!disabled[command_buffer_state] && buffer_state) {
5259 cb_state->AddChild(buffer_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005260 }
5261}
5262
5263void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5264 VkDeviceSize offset, VkBuffer countBuffer,
5265 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5266 uint32_t stride) {
5267 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5268 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5269 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005270 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5271 "vkCmdDrawMeshTasksIndirectCountNV()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005272 if (!disabled[command_buffer_state]) {
5273 if (buffer_state) {
5274 cb_state->AddChild(buffer_state);
5275 }
5276 if (count_buffer_state) {
5277 cb_state->AddChild(count_buffer_state);
5278 }
locke-lunargd556cc32019-09-17 01:21:23 -06005279 }
5280}
5281
// Track a newly created shader module. Non-SPIR-V input (e.g. GLSL via a layer below us) gets a
// default-constructed, "invalid spirv" state object so later validation can bail out gracefully.
void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;  // No object was created; nothing to track
    // Chassis-supplied per-call scratch state carrying the unique shader id assigned at PreCall time.
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    // Pick the SPIR-V environment from the device's API version and whether VK_KHR_spirv_1_4 is enabled.
    spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
    // A real SPIR-V blob starts with the SPIR-V magic number word.
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    new_shader_module->SetPushConstantUsedInShader();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}
5297
// Populate per-stage state for one pipeline shader stage and fold its descriptor usage into the
// pipeline's active_slots map. Bails out early (leaving stage_state partially filled) when the
// module has no valid SPIR-V or the named entrypoint is missing.
void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PipelineStageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    stage_state->entry_point_name = pStage->pName;
    stage_state->shader_state = GetShared<SHADER_MODULE_STATE>(pStage->module);
    auto module = stage_state->shader_state.get();
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = module->FindEntrypoint(pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    stage_state->stage_flag = pStage->stage;

    // Mark accessible ids
    stage_state->accessible_ids = module->MarkAccessibleIds(entrypoint);
    module->ProcessExecutionModes(entrypoint, pipeline);

    // CollectInterfaceByDescriptorSlot also reports, via the out-params, whether any descriptor is
    // written to or used atomically anywhere in the stage.
    stage_state->descriptor_uses = module->CollectInterfaceByDescriptorSlot(
        stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
    // Capture descriptor uses for the pipeline
    for (const auto &use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        // use.first is a (set, binding) pair; use.second describes how the binding is accessed.
        const uint32_t slot = use.first.first;
        pipeline->active_slots[slot][use.first.second].is_writable |= use.second.is_writable;
        // Accumulate requirement flags for this binding across all stages of the pipeline.
        auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
        reqs = descriptor_req(reqs | module->DescriptorTypeToReqs(use.second.type_id));
        if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
        if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
        if (use.second.is_sampler_bias_offset) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET);

        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
        if (use.second.samplers_used_by_image.size()) {
            // Record which sampler bindings are combined with each image access of this binding;
            // the sampler state pointers are left null here and resolved later (at draw time).
            auto &samplers_used_by_image = pipeline->active_slots[slot][use.first.second].samplers_used_by_image;
            if (use.second.samplers_used_by_image.size() > samplers_used_by_image.size()) {
                samplers_used_by_image.resize(use.second.samplers_used_by_image.size());
            }
            uint32_t image_index = 0;
            for (const auto &samplers : use.second.samplers_used_by_image) {
                for (const auto &sampler : samplers) {
                    samplers_used_by_image[image_index].emplace(sampler, nullptr);
                }
                ++image_index;
            }
        }
    }

    // Fragment stages additionally record which output locations are written, for interface checks.
    if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
        pipeline->fragmentShader_writable_output_location_list = module->CollectWritableOutputLocationinFS(*pStage);
    }
}
5349
// Discussed in details in https://github.com/KhronosGroup/Vulkan-Docs/issues/1081
// Internal discussion and CTS were written to prove that this is not called after an incompatible vkCmdBindPipeline
// "Binding a pipeline with a layout that is not compatible with the push constant layout does not disturb the push constant values"
//
// vkCmdBindDescriptorSet has nothing to do with push constants and don't need to call this after neither
//
// Part of this assumes apps at draw/dispath/traceRays/etc time will have it properly compatabile or else other VU will be triggered
//
// Rebuilds the command buffer's push-constant shadow storage whenever the bound layout's
// push-constant ranges differ from what was last recorded. For every stage bit in every range a
// per-stage byte map is built marking each byte as "not set" (outside the range) or "not updated"
// (inside the range, awaiting vkCmdPushConstants).
void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    // Only reset when the ranges actually changed; an identical layout leaves values undisturbed.
    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        cb_state->push_constant_data_update.clear();
        uint32_t size_needed = 0;
        for (const auto &push_constant_range : *cb_state->push_constant_data_ranges) {
            auto size = push_constant_range.offset + push_constant_range.size;
            size_needed = std::max(size_needed, size);

            // Walk the stage flag bits; each set bit gets its own byte-status vector.
            auto stage_flags = push_constant_range.stageFlags;
            uint32_t bit_shift = 0;
            while (stage_flags) {
                if (stage_flags & 1) {
                    VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
                    const auto it = cb_state->push_constant_data_update.find(flag);

                    if (it != cb_state->push_constant_data_update.end()) {
                        // Stage already seen from an earlier range: only grow the vector.
                        // Bytes before the range's offset default to "not set", bytes inside to "not updated".
                        if (it->second.size() < push_constant_range.offset) {
                            it->second.resize(push_constant_range.offset, PC_Byte_Not_Set);
                        }
                        if (it->second.size() < size) {
                            it->second.resize(size, PC_Byte_Not_Updated);
                        }
                    } else {
                        // First range for this stage: build the vector from scratch with the same two-phase fill.
                        std::vector<uint8_t> bytes;
                        bytes.resize(push_constant_range.offset, PC_Byte_Not_Set);
                        bytes.resize(size, PC_Byte_Not_Updated);
                        cb_state->push_constant_data_update[flag] = bytes;
                    }
                }
                stage_flags = stage_flags >> 1;
                ++bit_shift;
            }
        }
        // Shadow buffer spans the largest offset+size over all ranges, zero-initialized.
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}
John Zulauf22b0fbe2019-10-15 06:26:16 -06005404
// Create IMAGE_STATE entries for swapchain images the first time the app retrieves them.
// Each image gets a synthesized VkImageCreateInfo derived from the swapchain create info, plus a
// fake base address shared by all images bound to the same swapchain slot (used by sync validation).
void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            SWAPCHAIN_IMAGE &swapchain_image = swapchain_state->images[i];
            if (swapchain_image.image_state) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            // Reconstruct the create info the implementation would have used for this image.
            auto image_ci = LvlInitStruct<VkImageCreateInfo>();
            image_ci.pNext = LvlFindInChain<VkImageFormatListCreateInfo>(swapchain_state->createInfo.pNext);
            image_ci.flags = 0;  // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            // Map swapchain create flags onto the equivalent image create flags (per the WSI spec).
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR) {
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            }
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR) {
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            }
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT);
            }

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
            auto *image_state = imageMap[pSwapchainImages[i]].get();
            assert(image_state);
            image_state->valid = false;  // Contents undefined until first acquire/write
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;

            // Since swapchains can't be linear, we can create an encoder here, and SyncValNeeds a fake_base_address
            image_state->fragment_encoder = std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(
                new subresource_adapter::ImageRangeEncoder(*image_state));

            if (swapchain_image.bound_images.empty()) {
                // First time "bind" allocates
                image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
            } else {
                // All others reuse
                image_state->swapchain_fake_address = (*swapchain_image.bound_images.cbegin())->swapchain_fake_address;
                // Since there are others, need to update the aliasing information
                for (auto other_image : swapchain_image.bound_images) {
                    image_state->AddAliasingImage(other_image);
                }
            }

            swapchain_image.image_state = image_state;  // Don't move, it's already a reference to the imageMap
            swapchain_image.bound_images.emplace(image_state);

            AddImageStateProps(*image_state, device, physical_device);
        }
    }

    if (*pSwapchainImageCount) {
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}
sourav parmar35e7a002020-06-09 17:58:44 -07005483
// Propagate acceleration-structure state for a recorded copy: the destination inherits the
// source's built status and build info, and both objects become children of the command buffer.
void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
                                                                           const VkCopyAccelerationStructureInfoKHR *pInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfo->src);
        ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfo->dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            // The copy leaves dst in the same logical built state as src.
            dst_as_state->built = true;
            dst_as_state->build_info_khr = src_as_state->build_info_khr;
            if (!disabled[command_buffer_state]) {
                cb_state->AddChild(dst_as_state);
                cb_state->AddChild(src_as_state);
            }
        }
    }
}
Piers Daniell39842ee2020-07-10 16:42:33 -06005500
5501void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
5502 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5503 cb_state->status |= CBSTATUS_CULL_MODE_SET;
5504 cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
5505}
5506
5507void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
5508 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5509 cb_state->status |= CBSTATUS_FRONT_FACE_SET;
5510 cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
5511}
5512
5513void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
5514 VkPrimitiveTopology primitiveTopology) {
5515 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5516 cb_state->primitiveTopology = primitiveTopology;
5517 cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5518 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5519}
5520
// Record dynamically-set viewports plus their count, and mark those viewport slots as valid
// (clearing any "trashed" tracking from an earlier pipeline bind).
void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
                                                                     const VkViewport *pViewports) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // Mask of the low viewportCount bits.
    // NOTE(review): 1u << viewportCount is UB when viewportCount >= 32; presumably bounded well
    // below that by maxViewports — confirm.
    uint32_t bits = (1u << viewportCount) - 1u;
    cb_state->viewportWithCountMask |= bits;
    cb_state->trashedViewportMask &= ~bits;
    cb_state->viewportWithCountCount = viewportCount;
    cb_state->trashedViewportCount = false;
    cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
    cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;

    // Grow (never shrink) the stored viewport array, then copy the new values in.
    cb_state->dynamicViewports.resize(std::max(size_t(viewportCount), cb_state->dynamicViewports.size()));
    for (size_t i = 0; i < viewportCount; ++i) {
        cb_state->dynamicViewports[i] = pViewports[i];
    }
}
5537
// Record the dynamically-set scissor count and mark those scissor slots as valid (clearing any
// "trashed" tracking from an earlier pipeline bind). The scissor rectangles themselves are not stored.
void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
                                                                    const VkRect2D *pScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // Mask of the low scissorCount bits.
    // NOTE(review): 1u << scissorCount is UB when scissorCount >= 32; presumably bounded well
    // below that by maxViewports — confirm.
    uint32_t bits = (1u << scissorCount) - 1u;
    cb_state->scissorWithCountMask |= bits;
    cb_state->trashedScissorMask &= ~bits;
    cb_state->scissorWithCountCount = scissorCount;
    cb_state->trashedScissorCount = false;
    cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
    cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
}
5549
// Track vertex buffer bindings made with the extended-dynamic-state entry point. When pStrides is
// supplied the stride becomes dynamic state; pSizes/pStrides are optional arrays.
void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                                   uint32_t bindingCount, const VkBuffer *pBuffers,
                                                                   const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
                                                                   const VkDeviceSize *pStrides) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (pStrides) {
        cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
        cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
    }

    // Grow the binding array so indices [firstBinding, firstBinding+bindingCount) are addressable.
    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
        vertex_buffer_binding.offset = pOffsets[i];
        // Missing optional arrays fall back to WHOLE_SIZE / zero stride.
        vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
        vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
        // Add binding for this vertex buffer to this commandbuffer
        if (!disabled[command_buffer_state] && pBuffers[i]) {
            cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
        }
    }
}
5577
5578void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
5579 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5580 cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
5581 cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
5582}
5583
5584void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
5585 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5586 cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5587 cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5588}
5589
5590void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
5591 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5592 cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
5593 cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
5594}
5595
5596void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
5597 VkBool32 depthBoundsTestEnable) {
5598 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5599 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5600 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5601}
5602void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
5603 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5604 cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
5605 cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
5606}
5607
5608void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
5609 VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
5610 VkCompareOp compareOp) {
5611 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5612 cb_state->status |= CBSTATUS_STENCIL_OP_SET;
5613 cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
5614}
locke-lunarg4189aa22020-10-21 00:23:48 -06005615
5616void ValidationStateTracker::PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
5617 uint32_t discardRectangleCount,
5618 const VkRect2D *pDiscardRectangles) {
5619 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5620 cb_state->status |= CBSTATUS_DISCARD_RECTANGLE_SET;
5621 cb_state->static_status &= ~CBSTATUS_DISCARD_RECTANGLE_SET;
5622}
5623
5624void ValidationStateTracker::PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
5625 const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
5626 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5627 cb_state->status |= CBSTATUS_SAMPLE_LOCATIONS_SET;
5628 cb_state->static_status &= ~CBSTATUS_SAMPLE_LOCATIONS_SET;
5629}
5630
5631void ValidationStateTracker::PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer,
5632 VkCoarseSampleOrderTypeNV sampleOrderType,
5633 uint32_t customSampleOrderCount,
5634 const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) {
5635 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5636 cb_state->status |= CBSTATUS_COARSE_SAMPLE_ORDER_SET;
5637 cb_state->static_status &= ~CBSTATUS_COARSE_SAMPLE_ORDER_SET;
5638}
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -07005639
5640void ValidationStateTracker::PreCallRecordCmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer, uint32_t patchControlPoints) {
5641 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5642 cb_state->status |= CBSTATUS_PATCH_CONTROL_POINTS_SET;
5643 cb_state->static_status &= ~CBSTATUS_PATCH_CONTROL_POINTS_SET;
5644}
5645
5646void ValidationStateTracker::PreCallRecordCmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp) {
5647 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5648 cb_state->status |= CBSTATUS_LOGIC_OP_SET;
5649 cb_state->static_status &= ~CBSTATUS_LOGIC_OP_SET;
5650}
5651
5652void ValidationStateTracker::PreCallRecordCmdSetRasterizerDiscardEnableEXT(VkCommandBuffer commandBuffer,
5653 VkBool32 rasterizerDiscardEnable) {
5654 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5655 cb_state->status |= CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
5656 cb_state->static_status &= ~CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
5657}
5658
5659void ValidationStateTracker::PreCallRecordCmdSetDepthBiasEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) {
5660 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5661 cb_state->status |= CBSTATUS_DEPTH_BIAS_ENABLE_SET;
5662 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_ENABLE_SET;
5663}
5664
5665void ValidationStateTracker::PreCallRecordCmdSetPrimitiveRestartEnableEXT(VkCommandBuffer commandBuffer,
5666 VkBool32 primitiveRestartEnable) {
5667 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5668 cb_state->status |= CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
5669 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005670}
Piers Daniell924cd832021-05-18 13:48:47 -06005671
5672void ValidationStateTracker::PreCallRecordCmdSetVertexInputEXT(
5673 VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount,
5674 const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount,
5675 const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions) {
5676 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5677 cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET;
5678 cb_state->static_status &= ~(CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET);
5679}
Nathaniel Cesario42ac6ca2021-06-15 17:23:05 -06005680
5681void ValidationStateTracker::RecordGetBufferDeviceAddress(const VkBufferDeviceAddressInfo *pInfo, VkDeviceAddress address) {
5682 BUFFER_STATE *buffer_state = GetBufferState(pInfo->buffer);
5683 if (buffer_state) {
5684 // address is used for GPU-AV and ray tracing buffer validation
5685 buffer_state->deviceAddress = address;
5686 buffer_address_map_.emplace(address, buffer_state);
5687 }
5688}
5689
5690void ValidationStateTracker::PostCallRecordGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
5691 VkDeviceAddress address) {
5692 RecordGetBufferDeviceAddress(pInfo, address);
5693}
5694
5695void ValidationStateTracker::PostCallRecordGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
5696 VkDeviceAddress address) {
5697 RecordGetBufferDeviceAddress(pInfo, address);
5698}
5699
5700void ValidationStateTracker::PostCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
5701 VkDeviceAddress address) {
5702 RecordGetBufferDeviceAddress(pInfo, address);
Nathaniel Cesario39152e62021-07-02 13:04:16 -06005703}
5704
5705std::shared_ptr<SWAPCHAIN_NODE> ValidationStateTracker::CreateSwapchainState(const VkSwapchainCreateInfoKHR *create_info,
5706 VkSwapchainKHR swapchain) {
5707 return std::make_shared<SWAPCHAIN_NODE>(create_info, swapchain);
5708}