/* Copyright (c) 2015-2021 The Khronos Group Inc.
 * Copyright (c) 2015-2021 Valve Corporation
 * Copyright (c) 2015-2021 LunarG, Inc.
 * Copyright (C) 2015-2021 Google Inc.
 * Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 * Author: Tobias Hector <tobias.hector@amd.com>
 */

#include <algorithm>
#include <cmath>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"
#include "sync_utils.h"
#include "cmd_buffer_state.h"
#include "render_pass_state.h"

void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}

// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
// attachments won't persist past the API entry point exit.
static std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
                                                                          const FRAMEBUFFER_STATE &fb_state) {
    const VkImageView *attachments = fb_state.createInfo.pAttachments;
    uint32_t count = fb_state.createInfo.attachmentCount;
    if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
        const auto *framebuffer_attachments = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
        if (framebuffer_attachments) {
            attachments = framebuffer_attachments->pAttachments;
            count = framebuffer_attachments->attachmentCount;
        }
    }
    return std::make_pair(count, attachments);
}

template <typename ImageViewPointer, typename Get>
std::vector<ImageViewPointer> GetAttachmentViewsImpl(const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state,
                                                     const Get &get_fn) {
    std::vector<ImageViewPointer> views;

    const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
    const auto attachment_count = count_attachment.first;
    const auto *attachments = count_attachment.second;
    views.resize(attachment_count, nullptr);
    for (uint32_t i = 0; i < attachment_count; i++) {
        if (attachments[i] != VK_NULL_HANDLE) {
            views[i] = get_fn(attachments[i]);
        }
    }
    return views;
}

std::vector<std::shared_ptr<const IMAGE_VIEW_STATE>> ValidationStateTracker::GetSharedAttachmentViews(
    const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state) const {
    auto get_fn = [this](VkImageView handle) { return this->GetShared<IMAGE_VIEW_STATE>(handle); };
    return GetAttachmentViewsImpl<std::shared_ptr<const IMAGE_VIEW_STATE>>(rp_begin, fb_state, get_fn);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

template <typename CreateInfo>
VkFormatFeatureFlags ValidationStateTracker::GetExternalFormatFeaturesANDROID(const CreateInfo *create_info) const {
    VkFormatFeatureFlags format_features = 0;
    const VkExternalFormatANDROID *ext_fmt_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            format_features = it->second;
        }
    }
    return format_features;
}

void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto ahb_format_props = LvlFindInChain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
    if (ahb_format_props) {
        ahb_ext_formats_map.emplace(ahb_format_props->externalFormat, ahb_format_props->formatFeatures);
    }
}

#else

template <typename CreateInfo>
VkFormatFeatureFlags ValidationStateTracker::GetExternalFormatFeaturesANDROID(const CreateInfo *create_info) const {
    return 0;
}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

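// Caches the format features supported for image_state's format and tiling on the IMAGE_STATE so later validation
// does not need to query the driver again. For VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT the features come from the
// image's DRM format modifier; otherwise they come from the format's linear or optimal tiling features.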
void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
    // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
    // if format is AHB external format then the features are already set
    if (image_state.has_ahb_format == false) {
        const VkImageTiling image_tiling = image_state.createInfo.tiling;
        const VkFormat image_format = image_state.createInfo.format;
        if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
            VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
                VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
            DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image(), &drm_format_properties);

            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            // First call gets the number of modifiers compatible with the queried format
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
            drm_properties.resize(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
            // Second call, now with an allocated array, gets the modifiers themselves
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);

            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
                    drm_format_properties.drmFormatModifier) {
                    image_state.format_features =
                        drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                    break;
                }
            }
        } else {
            VkFormatProperties format_properties;
            DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
            image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                   : format_properties.optimalTilingFeatures;
        }
    }
}

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        is_node->format_features = GetExternalFormatFeaturesANDROID(pCreateInfo);
    }
    const auto swapchain_info = LvlFindInChain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->IsExternalAHB() == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements[0]);
        } else {
            // Disjoint multi-planar image: record the memory requirements of each plane separately
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least first plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->requirements[0] = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->requirements[1] = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->requirements[2] = mem_reqs2.memoryRequirements;
            }
        }
    }

    AddImageStateProps(*is_node, device, physical_device);

    imageMap.emplace(*pImage, std::move(is_node));
}

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    // Clean up memory mapping, bindings and range references for image
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state);
        }
    }
    image_state->Destroy();
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        cb_node->AddChild(image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        cb_node->AddChild(image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
                                                           const VkCopyImageInfo2KHR *pCopyImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pCopyImageInfo->srcImage);
    auto dst_image_state = GetImageState(pCopyImageInfo->dstImage);

    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
                                                              const VkResolveImageInfo2KHR *pResolveImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pResolveImageInfo->srcImage);
    auto dst_image_state = GetImageState(pResolveImageInfo->dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
                                                           const VkBlitImageInfo2KHR *pBlitImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pBlitImageInfo->srcImage);
    auto dst_image_state = GetImageState(pBlitImageInfo->dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;

    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    // Record the memory requirements in case the app does not query them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.emplace(*pBuffer, std::move(buffer_state));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;

    auto buffer_state = GetBufferShared(pCreateInfo->buffer);

    VkFormatProperties format_properties;
    DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);

    bufferViewMap[*pView] =
        std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo, format_properties.bufferFeatures);
}

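// Records the new image view and caches everything later validation needs: the view's format features (from the
// AHB external format, the image's DRM format modifier, or the plain tiling query), the usage inherited from the
// image or from VkImageViewUsageCreateInfo, and the filter-cubic query results when VK_EXT_filter_cubic is enabled.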
void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);

    // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
    const VkImageTiling image_tiling = image_state->createInfo.tiling;
    const VkFormat image_view_format = pCreateInfo->format;
    if (image_state->has_ahb_format == true) {
        // The ImageView uses the same format features as its Image since they share the same AHB
        image_view_state->format_features = image_state->format_features;
    } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
        assert(device_extensions.vk_ext_image_drm_format_modifier);
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                       nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image(), &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;

        // First call is to get the number of modifiers compatible with the queried format
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
        drm_properties.resize(drm_properties_list.drmFormatModifierCount);
        drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();

        // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
        // compatible with the queried format
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
                image_view_state->format_features |=
                    drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                break;
            }
        }
    } else {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
        image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                     : format_properties.optimalTilingFeatures;
    }

    auto usage_create_info = LvlFindInChain<VkImageViewUsageCreateInfo>(pCreateInfo->pNext);
    image_view_state->inherited_usage = (usage_create_info) ? usage_create_info->usage : image_state->createInfo.usage;

    // filter_cubic_props is used in CmdDraw validation; querying it at every draw would cost too much, so cache it here
    image_view_state->filter_cubic_props = LvlInitStruct<VkFilterCubicImageViewImageFormatPropertiesEXT>();
    if (IsExtEnabled(device_extensions.vk_ext_filter_cubic)) {
        auto imageview_format_info = LvlInitStruct<VkPhysicalDeviceImageViewImageFormatInfoEXT>();
        imageview_format_info.imageViewType = pCreateInfo->viewType;
        auto image_format_info = LvlInitStruct<VkPhysicalDeviceImageFormatInfo2>(&imageview_format_info);
        image_format_info.type = image_state->createInfo.imageType;
        image_format_info.format = image_state->createInfo.format;
        image_format_info.tiling = image_state->createInfo.tiling;
        image_format_info.usage = image_view_state->inherited_usage;
        image_format_info.flags = image_state->createInfo.flags;

        auto image_format_properties = LvlInitStruct<VkImageFormatProperties2>(&image_view_state->filter_cubic_props);

        DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &image_format_info, &image_format_properties);
    }
    imageViewMap.emplace(*pView, std::move(image_view_state));
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
                                                            const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(pCopyBufferInfos->srcBuffer);
    auto dst_buffer_state = GetBufferState(pCopyBufferInfos->dstBuffer);

    // Update bindings between buffers and cmd buffer
    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;

    // Any bound cmd buffers are now invalid
    image_view_state->Destroy();
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);

    buffer_state->Destroy();
    bufferMap.erase(buffer_state->buffer());
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);

    // Any bound cmd buffers are now invalid
    buffer_view_state->Destroy();
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    cb_node->AddChild(buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
                                                                   const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pCopyImageToBufferInfo->srcImage);
    auto dst_buffer_state = GetBufferState(pCopyImageToBufferInfo->dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
                                                                   const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(pCopyBufferToImageInfo->srcBuffer);
    auto dst_image_state = GetImageState(pCopyBufferToImageInfo->dstImage);

    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_image_state);
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

void ValidationStateTracker::RemoveAliasingImages(const layer_data::unordered_set<IMAGE_STATE *> &bound_images) {
    // This is a one-way clear. Because bound_images contains cross references, this single loop clears every
    // reference; a two-way clear is not needed.
    for (auto *bound_image : bound_images) {
        if (bound_image) {
            bound_image->aliasing_images.clear();
        }
    }
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureStateNV(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

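// Called while recording draw and dispatch type commands. Binds the descriptor resources required by the currently
// bound pipeline to the command buffer, using the prefiltered binding-requirement map so that descriptor sets whose
// contents and image layouts have not changed since the last validation are skipped.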
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
                                             const char *function) {
    const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
    auto &state = cb_state->lastBound[lv_bind_point];
    PIPELINE_STATE *pipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pipe->active_slots) {
            uint32_t set_index = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[set_index].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[set_index].validated_set != descriptor_set ||
                state.per_set[set_index].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[set_index].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[set_index].validated_set_binding_req_map.begin(),
                                              state.per_set[set_index].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[set_index].validated_set_binding_req_map.begin(),
                                        state.per_set[set_index].validated_set_binding_req_map.end(),
                                        layer_data::insert_iterator<BindingReqMap>(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, delta_reqs, function);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, binding_req_map, function);
                }

                state.per_set[set_index].validated_set = descriptor_set;
                state.per_set[set_index].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[set_index].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[set_index].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[set_index].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[set_index].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    if (!pipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    // Any bound cmd buffers are now invalid
    descriptor_set->Destroy();

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto *ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureStateNV(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureKHR: {
            base_ptr = GetAccelerationStructureStateKHR(object_struct.Cast<VkAccelerationStructureKHR>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Gets the union of all features defined by Potential Format Features
// Does not handle the external format case for AHB, as that can only be used for sampled images
VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
    VkFormatFeatureFlags format_features = 0;

    if (format != VK_FORMAT_UNDEFINED) {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
        format_features |= format_properties.linearTilingFeatures;
        format_features |= format_properties.optimalTilingFeatures;
        if (device_extensions.vk_ext_image_drm_format_modifier) {
            // VK_KHR_get_physical_device_properties2 is required in this case
            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;

            // First call is to get the number of modifiers compatible with the queried format
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);

            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
            drm_properties.resize(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();

            // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
            // compatible with the queried format
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);

            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
            }
        }
    }

    return format_features;
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *cb_state = GetCBState(cb);
    if (cb_state) {
        cb_state->Reset();
        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
    }

    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

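// Captures the feature enablement requested at device creation. Features may be supplied through pEnabledFeatures or
// through VkPhysicalDeviceFeatures2 and the extension/Vulkan 1.2 feature structs in the pNext chain; they are
// normalized into state_tracker->enabled_features for use by the rest of validation.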
void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = LvlFindInChain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);

            const auto *provoking_vertex_features = lvl_find_in_chain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(features2->pNext);
            if (provoking_vertex_features) {
                state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
            }
        }
    }

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously set them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *vulkan_12_features = LvlFindInChain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
    if (vulkan_12_features) {
        state_tracker->enabled_features.core12 = *vulkan_12_features;
    } else {
        // Set Extension Feature Aliases to false as there is no struct to check
        state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
        state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
        state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
        state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
        state_tracker->enabled_features.core12.subgroupBroadcastDynamicId = VK_FALSE;

        // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features

        const auto *eight_bit_storage_features = LvlFindInChain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
        if (eight_bit_storage_features) {
            state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
            state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
                eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
            state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
        }

        const auto *float16_int8_features = LvlFindInChain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
        if (float16_int8_features) {
            state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
            state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
        }

        const auto *descriptor_indexing_features = LvlFindInChain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
        if (descriptor_indexing_features) {
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
                descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
            state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
                descriptor_indexing_features->descriptorBindingPartiallyBound;
            state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
                descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
            state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
        }

        const auto *scalar_block_layout_features = LvlFindInChain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
        if (scalar_block_layout_features) {
            state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
        }

        const auto *imageless_framebuffer_features =
            LvlFindInChain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
        if (imageless_framebuffer_features) {
            state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
        }

        const auto *uniform_buffer_standard_layout_features =
            LvlFindInChain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
        if (uniform_buffer_standard_layout_features) {
            state_tracker->enabled_features.core12.uniformBufferStandardLayout =
                uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
        }

        const auto *subgroup_extended_types_features =
            LvlFindInChain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700984 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700985 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
986 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700987 }
988
989 const auto *separate_depth_stencil_layouts_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700990 LvlFindInChain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700991 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700992 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
993 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700994 }
995
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700996 const auto *host_query_reset_features = LvlFindInChain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700997 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700998 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700999 }
1000
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001001 const auto *timeline_semaphore_features = LvlFindInChain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001002 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001003 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001004 }
1005
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001006 const auto *buffer_device_address = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001007 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001008 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1009 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1010 buffer_device_address->bufferDeviceAddressCaptureReplay;
1011 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1012 buffer_device_address->bufferDeviceAddressMultiDevice;
1013 }
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001014
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001015 const auto *atomic_int64_features = LvlFindInChain<VkPhysicalDeviceShaderAtomicInt64Features>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001016 if (atomic_int64_features) {
1017 state_tracker->enabled_features.core12.shaderBufferInt64Atomics = atomic_int64_features->shaderBufferInt64Atomics;
1018 state_tracker->enabled_features.core12.shaderSharedInt64Atomics = atomic_int64_features->shaderSharedInt64Atomics;
1019 }
1020
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001021 const auto *memory_model_features = LvlFindInChain<VkPhysicalDeviceVulkanMemoryModelFeatures>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001022 if (memory_model_features) {
1023 state_tracker->enabled_features.core12.vulkanMemoryModel = memory_model_features->vulkanMemoryModel;
1024 state_tracker->enabled_features.core12.vulkanMemoryModelDeviceScope =
1025 memory_model_features->vulkanMemoryModelDeviceScope;
1026 state_tracker->enabled_features.core12.vulkanMemoryModelAvailabilityVisibilityChains =
1027 memory_model_features->vulkanMemoryModelAvailabilityVisibilityChains;
1028 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001029 }
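    // Illustrative application-side sketch (not part of the tracker): a Vulkan 1.2 application would
    // normally chain the aggregate struct instead of the individual extension structs handled above, e.g.
    //     VkPhysicalDeviceVulkan12Features features12{};
    //     features12.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
    //     features12.descriptorIndexing = VK_TRUE;
    //     VkDeviceCreateInfo create_info{};
    //     create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    //     create_info.pNext = &features12;
    // in which case the whole struct is copied into enabled_features.core12 (mirroring the core11 path
    // below) and this else branch is skipped entirely.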
1030
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001031 const auto *vulkan_11_features = LvlFindInChain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001032 if (vulkan_11_features) {
1033 state_tracker->enabled_features.core11 = *vulkan_11_features;
1034 } else {
1035 // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan11Features
1036
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001037 const auto *sixteen_bit_storage_features = LvlFindInChain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001038 if (sixteen_bit_storage_features) {
1039 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1040 sixteen_bit_storage_features->storageBuffer16BitAccess;
1041 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1042 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1043 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1044 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1045 }
1046
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001047 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001048 if (multiview_features) {
1049 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1050 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1051 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1052 }
1053
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001054 const auto *variable_pointers_features = LvlFindInChain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001055 if (variable_pointers_features) {
1056 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1057 variable_pointers_features->variablePointersStorageBuffer;
1058 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1059 }
1060
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001061 const auto *protected_memory_features = LvlFindInChain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001062 if (protected_memory_features) {
1063 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1064 }
1065
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001066 const auto *ycbcr_conversion_features = LvlFindInChain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001067 if (ycbcr_conversion_features) {
1068 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1069 }
1070
1071 const auto *shader_draw_parameters_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001072 LvlFindInChain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001073 if (shader_draw_parameters_features) {
1074 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001075 }
1076 }
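    // The Vulkan 1.1 handling above mirrors the 1.2 path: a chained VkPhysicalDeviceVulkan11Features wins
    // outright, otherwise each promoted extension struct is folded into enabled_features.core11 field by field.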
1077
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001078 const auto *device_group_ci = LvlFindInChain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001079 if (device_group_ci) {
1080 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1081 state_tracker->device_group_create_info = *device_group_ci;
1082 } else {
1083 state_tracker->physical_device_count = 1;
1084 }
locke-lunargd556cc32019-09-17 01:21:23 -06001085
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001086 const auto *exclusive_scissor_features = LvlFindInChain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001087 if (exclusive_scissor_features) {
1088 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1089 }
1090
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001091 const auto *shading_rate_image_features = LvlFindInChain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001092 if (shading_rate_image_features) {
1093 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1094 }
1095
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001096 const auto *mesh_shader_features = LvlFindInChain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001097 if (mesh_shader_features) {
1098 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1099 }
1100
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001101 const auto *inline_uniform_block_features = LvlFindInChain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001102 if (inline_uniform_block_features) {
1103 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1104 }
1105
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001106 const auto *transform_feedback_features = LvlFindInChain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001107 if (transform_feedback_features) {
1108 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1109 }
1110
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001111 const auto *vtx_attrib_div_features = LvlFindInChain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001112 if (vtx_attrib_div_features) {
1113 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1114 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001115
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001116 const auto *buffer_device_address_ext = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001117 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001118 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001119 }
1120
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001121 const auto *cooperative_matrix_features = LvlFindInChain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001122 if (cooperative_matrix_features) {
1123 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1124 }
1125
locke-lunargd556cc32019-09-17 01:21:23 -06001126 const auto *compute_shader_derivatives_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001127 LvlFindInChain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001128 if (compute_shader_derivatives_features) {
1129 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1130 }
1131
1132 const auto *fragment_shader_barycentric_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001133 LvlFindInChain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001134 if (fragment_shader_barycentric_features) {
1135 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1136 }
1137
1138 const auto *shader_image_footprint_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001139 LvlFindInChain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001140 if (shader_image_footprint_features) {
1141 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1142 }
1143
1144 const auto *fragment_shader_interlock_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001145 LvlFindInChain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001146 if (fragment_shader_interlock_features) {
1147 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1148 }
1149
1150 const auto *demote_to_helper_invocation_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001151 LvlFindInChain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001152 if (demote_to_helper_invocation_features) {
1153 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1154 }
1155
1156 const auto *texel_buffer_alignment_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001157 LvlFindInChain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001158 if (texel_buffer_alignment_features) {
1159 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1160 }
1161
locke-lunargd556cc32019-09-17 01:21:23 -06001162 const auto *pipeline_exe_props_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001163 LvlFindInChain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001164 if (pipeline_exe_props_features) {
1165 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1166 }
1167
Jeff Bolz82f854d2019-09-17 14:56:47 -05001168 const auto *dedicated_allocation_image_aliasing_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001169 LvlFindInChain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
Jeff Bolz82f854d2019-09-17 14:56:47 -05001170 if (dedicated_allocation_image_aliasing_features) {
1171 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1172 *dedicated_allocation_image_aliasing_features;
1173 }
1174
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001175 const auto *performance_query_features = LvlFindInChain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001176 if (performance_query_features) {
1177 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1178 }
1179
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001180 const auto *device_coherent_memory_features = LvlFindInChain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
Tobias Hector782bcde2019-11-28 16:19:42 +00001181 if (device_coherent_memory_features) {
1182 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1183 }
1184
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001185 const auto *ycbcr_image_array_features = LvlFindInChain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungcead0802020-01-30 22:20:10 -08001186 if (ycbcr_image_array_features) {
1187 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1188 }
1189
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001190 const auto *ray_query_features = LvlFindInChain<VkPhysicalDeviceRayQueryFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001191 if (ray_query_features) {
1192 state_tracker->enabled_features.ray_query_features = *ray_query_features;
1193 }
1194
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001195 const auto *ray_tracing_pipeline_features = LvlFindInChain<VkPhysicalDeviceRayTracingPipelineFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001196 if (ray_tracing_pipeline_features) {
1197 state_tracker->enabled_features.ray_tracing_pipeline_features = *ray_tracing_pipeline_features;
1198 }
1199
1200 const auto *ray_tracing_acceleration_structure_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001201 LvlFindInChain<VkPhysicalDeviceAccelerationStructureFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001202 if (ray_tracing_acceleration_structure_features) {
1203 state_tracker->enabled_features.ray_tracing_acceleration_structure_features = *ray_tracing_acceleration_structure_features;
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001204 }
1205
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001206 const auto *robustness2_features = LvlFindInChain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz165818a2020-05-08 11:19:03 -05001207 if (robustness2_features) {
1208 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1209 }
1210
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001211 const auto *fragment_density_map_features = LvlFindInChain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001212 if (fragment_density_map_features) {
1213 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1214 }
1215
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001216 const auto *fragment_density_map_features2 = LvlFindInChain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001217 if (fragment_density_map_features2) {
1218 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1219 }
1220
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001221 const auto *astc_decode_features = LvlFindInChain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001222 if (astc_decode_features) {
1223 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1224 }
1225
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001226 const auto *custom_border_color_features = LvlFindInChain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
Tony-LunarG7337b312020-04-15 16:40:25 -06001227 if (custom_border_color_features) {
1228 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1229 }
1230
sfricke-samsungfd661d62020-05-16 00:57:27 -07001231 const auto *pipeline_creation_cache_control_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001232 LvlFindInChain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungfd661d62020-05-16 00:57:27 -07001233 if (pipeline_creation_cache_control_features) {
1234 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1235 }
1236
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001237 const auto *fragment_shading_rate_features = LvlFindInChain<VkPhysicalDeviceFragmentShadingRateFeaturesKHR>(pCreateInfo->pNext);
Tobias Hector6663c9b2020-11-05 10:18:02 +00001238 if (fragment_shading_rate_features) {
1239 state_tracker->enabled_features.fragment_shading_rate_features = *fragment_shading_rate_features;
1240 }
1241
Piers Daniell39842ee2020-07-10 16:42:33 -06001242 const auto *extended_dynamic_state_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001243 LvlFindInChain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
Piers Daniell39842ee2020-07-10 16:42:33 -06001244 if (extended_dynamic_state_features) {
1245 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1246 }
1247
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -07001248 const auto *extended_dynamic_state2_features =
1249 LvlFindInChain<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT>(pCreateInfo->pNext);
1250 if (extended_dynamic_state2_features) {
1251 state_tracker->enabled_features.extended_dynamic_state2_features = *extended_dynamic_state2_features;
1252 }
1253
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001254 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001255 if (multiview_features) {
1256 state_tracker->enabled_features.multiview_features = *multiview_features;
1257 }
1258
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001259 const auto *portability_features = LvlFindInChain<VkPhysicalDevicePortabilitySubsetFeaturesKHR>(pCreateInfo->pNext);
Nathaniel Cesariob3f2d702020-11-09 09:20:49 -07001260 if (portability_features) {
1261 state_tracker->enabled_features.portability_subset_features = *portability_features;
1262 }
1263
sfricke-samsung0065ce02020-12-03 22:46:37 -08001264 const auto *shader_integer_functions2_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001265 LvlFindInChain<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001266 if (shader_integer_functions2_features) {
1267 state_tracker->enabled_features.shader_integer_functions2_features = *shader_integer_functions2_features;
1268 }
1269
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001270 const auto *shader_sm_builtins_feature = LvlFindInChain<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001271 if (shader_sm_builtins_feature) {
1272 state_tracker->enabled_features.shader_sm_builtins_feature = *shader_sm_builtins_feature;
1273 }
1274
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001275 const auto *shader_atomic_float_feature = LvlFindInChain<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001276 if (shader_atomic_float_feature) {
1277 state_tracker->enabled_features.shader_atomic_float_feature = *shader_atomic_float_feature;
1278 }
1279
1280 const auto *shader_image_atomic_int64_feature =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001281 LvlFindInChain<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001282 if (shader_image_atomic_int64_feature) {
1283 state_tracker->enabled_features.shader_image_atomic_int64_feature = *shader_image_atomic_int64_feature;
1284 }
1285
sfricke-samsung486a51e2021-01-02 00:10:15 -08001286 const auto *shader_clock_feature = LvlFindInChain<VkPhysicalDeviceShaderClockFeaturesKHR>(pCreateInfo->pNext);
1287 if (shader_clock_feature) {
1288 state_tracker->enabled_features.shader_clock_feature = *shader_clock_feature;
1289 }
1290
Jeremy Gebben5f585ae2021-02-02 09:03:06 -07001291 const auto *conditional_rendering_features =
1292 LvlFindInChain<VkPhysicalDeviceConditionalRenderingFeaturesEXT>(pCreateInfo->pNext);
1293 if (conditional_rendering_features) {
1294 state_tracker->enabled_features.conditional_rendering = *conditional_rendering_features;
1295 }
1296
Shannon McPhersondb287d42021-02-02 15:27:32 -07001297 const auto *workgroup_memory_explicit_layout_features =
1298 LvlFindInChain<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>(pCreateInfo->pNext);
1299 if (workgroup_memory_explicit_layout_features) {
1300 state_tracker->enabled_features.workgroup_memory_explicit_layout_features = *workgroup_memory_explicit_layout_features;
1301 }
1302
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001303 const auto *synchronization2_features =
1304 LvlFindInChain<VkPhysicalDeviceSynchronization2FeaturesKHR>(pCreateInfo->pNext);
1305 if (synchronization2_features) {
1306 state_tracker->enabled_features.synchronization2_features = *synchronization2_features;
1307 }
1308
Locke Linf3873542021-04-26 11:25:10 -06001309 const auto *provoking_vertex_features = LvlFindInChain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(pCreateInfo->pNext);
1310 if (provoking_vertex_features) {
1311 state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
1312 }
1313
Piers Daniellcb6d8032021-04-19 18:51:26 -06001314 const auto *vertex_input_dynamic_state_features =
1315 LvlFindInChain<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1316 if (vertex_input_dynamic_state_features) {
1317 state_tracker->enabled_features.vertex_input_dynamic_state_features = *vertex_input_dynamic_state_features;
1318 }
1319
David Zhao Akeley44139b12021-04-26 16:16:13 -07001320 const auto *inherited_viewport_scissor_features =
1321 LvlFindInChain<VkPhysicalDeviceInheritedViewportScissorFeaturesNV>(pCreateInfo->pNext);
1322 if (inherited_viewport_scissor_features) {
1323 state_tracker->enabled_features.inherited_viewport_scissor_features = *inherited_viewport_scissor_features;
1324 }
1325
Tony-LunarG4490de42021-06-21 15:49:19 -06001326 const auto *multi_draw_features = LvlFindInChain<VkPhysicalDeviceMultiDrawFeaturesEXT>(pCreateInfo->pNext);
1327 if (multi_draw_features) {
1328 state_tracker->enabled_features.multi_draw_features = *multi_draw_features;
1329 }
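    // Every extension feature struct looked up above is copied wholesale into enabled_features so that later
    // validation can test individual members without re-walking the pNext chain.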
1330
locke-lunargd556cc32019-09-17 01:21:23 -06001331 // Store physical device properties and physical device memory limits into state tracker structs
1332 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1333 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001334 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1335 &state_tracker->phys_dev_props_core11);
1336 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1337 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001338
1339 const auto &dev_ext = state_tracker->device_extensions;
1340 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1341
1342 if (dev_ext.vk_khr_push_descriptor) {
1343 // Get the needed push_descriptor limits
1344 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1345 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1346 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1347 }
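    // GetPhysicalDeviceExtProperties appears to wrap a vkGetPhysicalDeviceProperties2 query that chains the
    // given struct only when the corresponding extension (or promoted core version) is enabled; the property
    // blocks below all rely on the same helper.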
1348
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001349 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001350 VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing_prop;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001351 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1352 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1353 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1354 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1355 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1356 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1357 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1358 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1359 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1360 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1361 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1362 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1363 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1364 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1365 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1366 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1367 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1368 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1369 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1370 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1371 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1372 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1373 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1374 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1375 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1376 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1377 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1378 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1379 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1380 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1381 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1382 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1383 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1384 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1385 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1386 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1387 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1388 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1389 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1390 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
1391 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
1392 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
1393 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
1394 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
1395 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
1396 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
1397 }
1398
locke-lunargd556cc32019-09-17 01:21:23 -06001399 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1400 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1401 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1402 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001403
1404 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001405 VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001406 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
1407 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
1408 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
1409 depth_stencil_resolve_props.supportedStencilResolveModes;
1410 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
1411 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
1412 }
1413
locke-lunargd556cc32019-09-17 01:21:23 -06001414 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001415 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
sourav parmarcd5fb182020-07-17 12:58:44 -07001416 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing_pipeline, &phys_dev_props->ray_tracing_propsKHR);
1417 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_acceleration_structure, &phys_dev_props->acc_structure_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001418 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1419 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001420 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001421 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07001422 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06001423 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001424 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_multiview, &phys_dev_props->multiview_props);
Nathaniel Cesario3291c912020-11-17 16:54:41 -07001425 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_portability_subset, &phys_dev_props->portability_props);
Locke Lin016d8482021-05-27 12:11:31 -06001426 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_provoking_vertex, &phys_dev_props->provoking_vertex_props);
Tony-LunarG4490de42021-06-21 15:49:19 -06001427 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_multi_draw, &phys_dev_props->multi_draw_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001428
1429 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001430 VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001431 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
1432 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
1433 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
1434 }
1435
1436 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001437 VkPhysicalDeviceFloatControlsProperties float_controls_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001438 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
1439 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
1440 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
1441 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
1442 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
1443 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
1444 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
1445 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
1446 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
1447 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
1448 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
1449 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
1450 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
1451 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
1452 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
1453 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
1454 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
1455 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
1456 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
1457 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
1458 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
1459 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001460
locke-lunargd556cc32019-09-17 01:21:23 -06001461 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1462 // Get the needed cooperative_matrix properties
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001463 auto cooperative_matrix_props = LvlInitStruct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1464 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&cooperative_matrix_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001465 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1466 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1467
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001468 uint32_t num_cooperative_matrix_properties = 0;
1469 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties, NULL);
1470 state_tracker->cooperative_matrix_properties.resize(num_cooperative_matrix_properties,
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001471 LvlInitStruct<VkCooperativeMatrixPropertiesNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06001472
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001473 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties,
locke-lunargd556cc32019-09-17 01:21:23 -06001474 state_tracker->cooperative_matrix_properties.data());
1475 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001476 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06001477 // Get the needed subgroup limits
Locke Lin016d8482021-05-27 12:11:31 -06001478 auto subgroup_prop = LvlInitStruct<VkPhysicalDeviceSubgroupProperties>();
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001479 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&subgroup_prop);
locke-lunargd556cc32019-09-17 01:21:23 -06001480 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1481
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001482 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
1483 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
1484 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
1485 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06001486 }
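    // VkPhysicalDeviceSubgroupProperties was promoted into VkPhysicalDeviceVulkan11Properties, which is why
    // the queried subgroup values are stored in phys_dev_props_core11 rather than a dedicated extension struct.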
1487
Tobias Hector6663c9b2020-11-05 10:18:02 +00001488 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_fragment_shading_rate, &phys_dev_props->fragment_shading_rate_props);
1489
locke-lunargd556cc32019-09-17 01:21:23 -06001490 // Store queue family data
1491 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1492 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001493 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
sfricke-samsungb585ec12021-05-06 03:10:13 -07001494 state_tracker->queue_family_index_set.insert(queue_create_info.queueFamilyIndex);
1495 state_tracker->device_queue_info_list.push_back(
1496 {i, queue_create_info.queueFamilyIndex, queue_create_info.flags, queue_create_info.queueCount});
locke-lunargd556cc32019-09-17 01:21:23 -06001497 }
1498 }
1499}
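// Illustrative application-side sketch of the queue data recorded above (assumed typical usage, not part of
// this file): each VkDeviceQueueCreateInfo such as
//     float priority = 1.0f;
//     VkDeviceQueueCreateInfo queue_ci{};
//     queue_ci.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
//     queue_ci.queueFamilyIndex = graphics_family_index;  // hypothetical index chosen by the application
//     queue_ci.queueCount = 1;
//     queue_ci.pQueuePriorities = &priority;
// contributes one entry to device_queue_info_list and adds its family index to queue_family_index_set.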
1500
1501void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
1502 if (!device) return;
1503
locke-lunargd556cc32019-09-17 01:21:23 -06001504 // Reset all command buffers before destroying them, to unlink object_bindings.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001505 for (auto &command_buffer : commandBufferMap) {
1506 ResetCommandBufferState(command_buffer.first);
locke-lunargd556cc32019-09-17 01:21:23 -06001507 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001508 pipelineMap.clear();
1509 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001510 commandBufferMap.clear();
1511
1512 // This will also delete all sets in the pool & remove them from setMap
1513 DeleteDescriptorSetPools();
1514 // All sets should be removed
1515 assert(setMap.empty());
1516 descriptorSetLayoutMap.clear();
1517 imageViewMap.clear();
1518 imageMap.clear();
1519 bufferViewMap.clear();
1520 bufferMap.clear();
1521 // Queues persist until device is destroyed
1522 queueMap.clear();
1523}
1524
locke-lunargd556cc32019-09-17 01:21:23 -06001525// Track which resources are in-flight by atomically incrementing their "in_use" count
1526void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1527 cb_node->submitCount++;
locke-lunargd556cc32019-09-17 01:21:23 -06001528
locke-lunargd556cc32019-09-17 01:21:23 -06001529 // TODO : We should be able to remove the NULL look-up checks from the code below once all the
1530 // corresponding cases are verified to cause CB_INVALID state, and that CB_INVALID state is
1531 // flagged prior to calling this function
1532 for (auto event : cb_node->writeEventsBeforeWait) {
1533 auto event_state = GetEventState(event);
1534 if (event_state) event_state->write_in_use++;
1535 }
1536}
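// The matching releases happen in RetireWorkOnQueue below, which decrements write_in_use for events and
// calls EndUse() on semaphores and primary command buffers once the submission's sequence number retires.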
1537
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001538void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
Jeremy Gebbencbf22862021-03-03 12:01:22 -07001539 layer_data::unordered_map<VkQueue, uint64_t> other_queue_seqs;
1540 layer_data::unordered_map<VkSemaphore, uint64_t> timeline_semaphore_counters;
locke-lunargd556cc32019-09-17 01:21:23 -06001541
1542 // Roll this queue forward, one submission at a time.
1543 while (pQueue->seq < seq) {
1544 auto &submission = pQueue->submissions.front();
1545
1546 for (auto &wait : submission.waitSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001547 auto semaphore_state = GetSemaphoreState(wait.semaphore);
1548 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001549 semaphore_state->EndUse();
locke-lunargd556cc32019-09-17 01:21:23 -06001550 }
Mike Schuchardt2df08912020-12-15 16:28:09 -08001551 if (wait.type == VK_SEMAPHORE_TYPE_TIMELINE) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001552 auto &last_counter = timeline_semaphore_counters[wait.semaphore];
1553 last_counter = std::max(last_counter, wait.payload);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001554 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001555 auto &last_seq = other_queue_seqs[wait.queue];
1556 last_seq = std::max(last_seq, wait.seq);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001557 }
locke-lunargd556cc32019-09-17 01:21:23 -06001558 }
1559
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001560 for (auto &signal : submission.signalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001561 auto semaphore_state = GetSemaphoreState(signal.semaphore);
1562 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001563 semaphore_state->EndUse();
Mike Schuchardt2df08912020-12-15 16:28:09 -08001564 if (semaphore_state->type == VK_SEMAPHORE_TYPE_TIMELINE && semaphore_state->payload < signal.payload) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001565 semaphore_state->payload = signal.payload;
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001566 }
locke-lunargd556cc32019-09-17 01:21:23 -06001567 }
1568 }
1569
1570 for (auto &semaphore : submission.externalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001571 auto semaphore_state = GetSemaphoreState(semaphore);
1572 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001573 semaphore_state->EndUse();
locke-lunargd556cc32019-09-17 01:21:23 -06001574 }
1575 }
1576
1577 for (auto cb : submission.cbs) {
1578 auto cb_node = GetCBState(cb);
1579 if (!cb_node) {
1580 continue;
1581 }
1582 // First perform decrement on general case bound objects
locke-lunargd556cc32019-09-17 01:21:23 -06001583 for (auto event : cb_node->writeEventsBeforeWait) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001584 auto event_node = eventMap.find(event);
1585 if (event_node != eventMap.end()) {
John Zulauf48057322020-12-02 11:59:31 -07001586 event_node->second->write_in_use--;
locke-lunargd556cc32019-09-17 01:21:23 -06001587 }
1588 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001589 QueryMap local_query_to_state_map;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001590 VkQueryPool first_pool = VK_NULL_HANDLE;
Jeff Bolz310775c2019-10-09 00:46:33 -05001591 for (auto &function : cb_node->queryUpdates) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001592 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
Jeff Bolz310775c2019-10-09 00:46:33 -05001593 }
1594
John Zulauf79f06582021-02-27 18:38:39 -07001595 for (const auto &query_state_pair : local_query_to_state_map) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001596 if (query_state_pair.second == QUERYSTATE_ENDED) {
1597 queryToStateMap[query_state_pair.first] = QUERYSTATE_AVAILABLE;
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001598 }
locke-lunargd556cc32019-09-17 01:21:23 -06001599 }
Jeremy Gebben5570abe2021-05-16 18:35:13 -06001600 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
1601 cb_node->EndUse();
1602 }
locke-lunargd556cc32019-09-17 01:21:23 -06001603 }
1604
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001605 auto fence_state = GetFenceState(submission.fence);
1606 if (fence_state && fence_state->scope == kSyncScopeInternal) {
1607 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06001608 }
1609
1610 pQueue->submissions.pop_front();
1611 pQueue->seq++;
1612 }
1613
1614 // Roll other queues forward to the highest seq we saw a wait for
John Zulauf79f06582021-02-27 18:38:39 -07001615 for (const auto &qs : other_queue_seqs) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001616 RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001617 }
John Zulauf79f06582021-02-27 18:38:39 -07001618 for (const auto &sc : timeline_semaphore_counters) {
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001619 RetireTimelineSemaphore(sc.first, sc.second);
1620 }
locke-lunargd556cc32019-09-17 01:21:23 -06001621}
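// Each QUEUE_STATE carries a monotonically increasing sequence number; RetireWorkOnQueue pops completed
// submissions up to the requested seq, releases their references, and then rolls other queues (and timeline
// semaphores) forward to the latest values this queue was observed waiting on.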
1622
1623// Submit a fence to a queue, delimiting previous fences and previous untracked
1624// work by it.
1625static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1626 pFence->state = FENCE_INFLIGHT;
1627 pFence->signaler.first = pQueue->queue;
1628 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1629}
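// signaler records the (queue, sequence number) pair at which the fence is expected to signal: the queue's
// current seq plus everything already queued plus the submissions being recorded now.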
1630
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001631uint64_t ValidationStateTracker::RecordSubmitFence(QUEUE_STATE *queue_state, VkFence fence, uint32_t submit_count) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001632 auto fence_state = GetFenceState(fence);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001633 uint64_t early_retire_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001634 if (fence_state) {
1635 if (fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06001636 // Mark fence in use
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001637 SubmitFence(queue_state, fence_state, std::max(1u, submit_count));
1638 if (!submit_count) {
locke-lunargd556cc32019-09-17 01:21:23 -06001639 // If no submissions, but just dropping a fence on the end of the queue,
1640 // record an empty submission with just the fence, so we can determine
1641 // its completion.
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001642 CB_SUBMISSION submission;
1643 submission.fence = fence;
1644 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001645 }
1646 } else {
1647 // Retire work up until this fence early, we will not see the wait that corresponds to this signal
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001648 early_retire_seq = queue_state->seq + queue_state->submissions.size();
locke-lunargd556cc32019-09-17 01:21:23 -06001649 }
1650 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001651 return early_retire_seq;
1652}
1653
1654void ValidationStateTracker::RecordSubmitCommandBuffer(CB_SUBMISSION &submission, VkCommandBuffer command_buffer) {
1655 auto cb_node = GetCBState(command_buffer);
1656 if (cb_node) {
1657 submission.cbs.push_back(command_buffer);
John Zulauf79f06582021-02-27 18:38:39 -07001658 for (auto *secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06001659 submission.cbs.push_back(secondary_cmd_buffer->commandBuffer());
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001660 IncrementResources(secondary_cmd_buffer);
1661 }
1662 IncrementResources(cb_node);
Jeremy Gebben5570abe2021-05-16 18:35:13 -06001663 // increment use count for all bound objects including secondary cbs
1664 cb_node->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001665
1666 VkQueryPool first_pool = VK_NULL_HANDLE;
1667 EventToStageMap local_event_to_stage_map;
1668 QueryMap local_query_to_state_map;
1669 for (auto &function : cb_node->queryUpdates) {
1670 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
1671 }
1672
John Zulauf79f06582021-02-27 18:38:39 -07001673 for (const auto &query_state_pair : local_query_to_state_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001674 queryToStateMap[query_state_pair.first] = query_state_pair.second;
1675 }
1676
John Zulauf79f06582021-02-27 18:38:39 -07001677 for (const auto &function : cb_node->eventUpdates) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001678 function(nullptr, /*do_validate*/ false, &local_event_to_stage_map);
1679 }
1680
John Zulauf79f06582021-02-27 18:38:39 -07001681 for (const auto &event_stage_pair : local_event_to_stage_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001682 eventMap[event_stage_pair.first]->stageMask = event_stage_pair.second;
1683 }
1684 }
1685}
1686
1687void ValidationStateTracker::RecordSubmitWaitSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
1688 uint64_t value, uint64_t next_seq) {
1689 auto semaphore_state = GetSemaphoreState(semaphore);
1690 if (semaphore_state) {
1691 if (semaphore_state->scope == kSyncScopeInternal) {
1692 SEMAPHORE_WAIT wait;
1693 wait.semaphore = semaphore;
1694 wait.type = semaphore_state->type;
1695 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
1696 if (semaphore_state->signaler.first != VK_NULL_HANDLE) {
1697 wait.queue = semaphore_state->signaler.first;
1698 wait.seq = semaphore_state->signaler.second;
1699 submission.waitSemaphores.emplace_back(std::move(wait));
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001700 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001701 }
1702 semaphore_state->signaler.first = VK_NULL_HANDLE;
1703 semaphore_state->signaled = false;
1704 } else if (semaphore_state->payload < value) {
1705 wait.queue = queue;
1706 wait.seq = next_seq;
1707 wait.payload = value;
1708 submission.waitSemaphores.emplace_back(std::move(wait));
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001709 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001710 }
1711 } else {
1712 submission.externalSemaphores.push_back(semaphore);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001713 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001714 if (semaphore_state->scope == kSyncScopeExternalTemporary) {
1715 semaphore_state->scope = kSyncScopeInternal;
1716 }
1717 }
1718 }
1719}
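// Binary semaphore waits link back to the (queue, seq) recorded by the signaler; timeline waits store the
// target payload value instead; externally-scoped semaphores are only noted in externalSemaphores because
// their signal may come from outside the tracker's view.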
1720
1721bool ValidationStateTracker::RecordSubmitSignalSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
1722 uint64_t value, uint64_t next_seq) {
1723 bool retire_early = false;
1724 auto semaphore_state = GetSemaphoreState(semaphore);
1725 if (semaphore_state) {
1726 if (semaphore_state->scope == kSyncScopeInternal) {
1727 SEMAPHORE_SIGNAL signal;
1728 signal.semaphore = semaphore;
1729 signal.seq = next_seq;
1730 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
1731 semaphore_state->signaler.first = queue;
1732 semaphore_state->signaler.second = next_seq;
1733 semaphore_state->signaled = true;
1734 } else {
1735 signal.payload = value;
1736 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001737 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001738 submission.signalSemaphores.emplace_back(std::move(signal));
1739 } else {
1740 // Retire work up until this submit early, since we will not see the wait that corresponds to this signal
1741 retire_early = true;
1742 }
1743 }
1744 return retire_early;
1745}
1746
1747void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
1748 VkFence fence, VkResult result) {
1749 if (result != VK_SUCCESS) return;
1750 auto queue_state = GetQueueState(queue);
1751
1752 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);
locke-lunargd556cc32019-09-17 01:21:23 -06001753
1754 // Now process each individual submit
1755 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001756 CB_SUBMISSION submission;
locke-lunargd556cc32019-09-17 01:21:23 -06001757 const VkSubmitInfo *submit = &pSubmits[submit_idx];
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001758 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001759 auto *timeline_semaphore_submit = LvlFindInChain<VkTimelineSemaphoreSubmitInfo>(submit->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001760 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
Jeremy Gebben4ae5cc72021-03-10 15:34:44 -07001761 uint64_t value = 0;
1762 if (timeline_semaphore_submit && timeline_semaphore_submit->pWaitSemaphoreValues != nullptr &&
1763 (i < timeline_semaphore_submit->waitSemaphoreValueCount)) {
1764 value = timeline_semaphore_submit->pWaitSemaphoreValues[i];
1765 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001766 RecordSubmitWaitSemaphore(submission, queue, submit->pWaitSemaphores[i], value, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001767 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001768
1769 bool retire_early = false;
locke-lunargd556cc32019-09-17 01:21:23 -06001770 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
Jeremy Gebben4ae5cc72021-03-10 15:34:44 -07001771 uint64_t value = 0;
1772 if (timeline_semaphore_submit && timeline_semaphore_submit->pSignalSemaphoreValues != nullptr &&
1773 (i < timeline_semaphore_submit->signalSemaphoreValueCount)) {
1774 value = timeline_semaphore_submit->pSignalSemaphoreValues[i];
1775 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001776 retire_early |= RecordSubmitSignalSemaphore(submission, queue, submit->pSignalSemaphores[i], value, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001777 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001778 if (retire_early) {
1779 early_retire_seq = std::max(early_retire_seq, next_seq);
1780 }
1781
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001782 const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001783 submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001784
locke-lunargd556cc32019-09-17 01:21:23 -06001785 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001786 RecordSubmitCommandBuffer(submission, submit->pCommandBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06001787 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001788 submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
1789 queue_state->submissions.emplace_back(std::move(submission));
1790 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001791
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001792 if (early_retire_seq) {
1793 RetireWorkOnQueue(queue_state, early_retire_seq);
1794 }
1795}
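
// Illustrative sketch (not part of the tracker): an application submit that exercises the
// timeline-semaphore path recorded above could look roughly like the following; the handle and
// variable names (timeline_sem, cmd_buf, queue, fence, wait_stage) are hypothetical.
//
//   uint64_t wait_value = 1, signal_value = 2;
//   VkTimelineSemaphoreSubmitInfo timeline_info = {VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO};
//   timeline_info.waitSemaphoreValueCount = 1;
//   timeline_info.pWaitSemaphoreValues = &wait_value;
//   timeline_info.signalSemaphoreValueCount = 1;
//   timeline_info.pSignalSemaphoreValues = &signal_value;
//
//   VkSubmitInfo submit_info = {VK_STRUCTURE_TYPE_SUBMIT_INFO, &timeline_info};
//   submit_info.waitSemaphoreCount = 1;
//   submit_info.pWaitSemaphores = &timeline_sem;
//   submit_info.pWaitDstStageMask = &wait_stage;
//   submit_info.signalSemaphoreCount = 1;
//   submit_info.pSignalSemaphores = &timeline_sem;
//   submit_info.commandBufferCount = 1;
//   submit_info.pCommandBuffers = &cmd_buf;
//   vkQueueSubmit(queue, 1, &submit_info, fence);  // lands in PostCallRecordQueueSubmit above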
1796
1797void ValidationStateTracker::PostCallRecordQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
1798 VkFence fence, VkResult result) {
1799 if (result != VK_SUCCESS) return;
1800 auto queue_state = GetQueueState(queue);
1801
1802 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);
1803
1804 // Now process each individual submit
1805 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1806 CB_SUBMISSION submission;
1807 const VkSubmitInfo2KHR *submit = &pSubmits[submit_idx];
1808 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
1809 for (uint32_t i = 0; i < submit->waitSemaphoreInfoCount; ++i) {
1810 const auto &sem_info = submit->pWaitSemaphoreInfos[i];
1811 RecordSubmitWaitSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
1812 }
1813 bool retire_early = false;
1814 for (uint32_t i = 0; i < submit->signalSemaphoreInfoCount; ++i) {
1815 const auto &sem_info = submit->pSignalSemaphoreInfos[i];
1816 retire_early |= RecordSubmitSignalSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
1817 }
1818 if (retire_early) {
1819 early_retire_seq = std::max(early_retire_seq, next_seq);
1820 }
1821 const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
1822 submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;
1823
1824 for (uint32_t i = 0; i < submit->commandBufferInfoCount; i++) {
1825 RecordSubmitCommandBuffer(submission, submit->pCommandBufferInfos[i].commandBuffer);
1826 }
1827 submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
1828 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001829 }
1830
1831 if (early_retire_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001832 RetireWorkOnQueue(queue_state, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001833 }
1834}
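
// Note: with VK_KHR_synchronization2 the timeline value travels inline in each
// VkSemaphoreSubmitInfoKHR (sem_info.value above) instead of a VkTimelineSemaphoreSubmitInfo
// chained through pNext, which is why this path needs no separate timeline-struct lookup.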
1835
1836void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
1837 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
1838 VkResult result) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06001839 if (VK_SUCCESS != result) {
1840 return;
locke-lunargd556cc32019-09-17 01:21:23 -06001841 }
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06001842 const auto &memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
1843 const auto &memory_heap = phys_dev_mem_props.memoryHeaps[memory_type.heapIndex];
1844 auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
1845
1846 layer_data::optional<DedicatedBinding> dedicated_binding;
1847
1848 auto dedicated = LvlFindInChain<VkMemoryDedicatedAllocateInfo>(pAllocateInfo->pNext);
1849 if (dedicated) {
1850 if (dedicated->buffer) {
1851 const auto *buffer_state = GetBufferState(dedicated->buffer);
1852 assert(buffer_state);
1853 if (!buffer_state) {
1854 return;
1855 }
1856 dedicated_binding.emplace(dedicated->buffer, buffer_state->createInfo);
1857 } else if (dedicated->image) {
1858 const auto *image_state = GetImageState(dedicated->image);
1859 assert(image_state);
1860 if (!image_state) {
1861 return;
1862 }
1863 dedicated_binding.emplace(dedicated->image, image_state->createInfo);
1864 }
1865 }
1866 memObjMap[*pMemory] = std::make_shared<DEVICE_MEMORY_STATE>(*pMemory, pAllocateInfo, fake_address, memory_type, memory_heap,
1867 std::move(dedicated_binding));
locke-lunargd556cc32019-09-17 01:21:23 -06001868 return;
1869}
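
// Illustrative sketch (not part of the tracker): dedicated_binding above is populated for an
// allocation that chains VkMemoryDedicatedAllocateInfo, e.g. (the names image, device, memory,
// mem_reqs, and chosen_type_index are hypothetical):
//
//   VkMemoryDedicatedAllocateInfo dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
//   dedicated_info.image = image;  // image and buffer must not both be set
//   VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, &dedicated_info};
//   alloc_info.allocationSize = mem_reqs.size;
//   alloc_info.memoryTypeIndex = chosen_type_index;
//   vkAllocateMemory(device, &alloc_info, nullptr, &memory);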
1870
1871void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
1872 if (!mem) return;
1873 DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06001874 if (!mem_info) return;
locke-lunargd556cc32019-09-17 01:21:23 -06001875 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001876 mem_info->Destroy();
John Zulauf79952712020-04-07 11:25:54 -06001877 fake_memory.Free(mem_info->fake_base_address);
locke-lunargd556cc32019-09-17 01:21:23 -06001878 memObjMap.erase(mem);
1879}
1880
1881void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
1882 VkFence fence, VkResult result) {
1883 if (result != VK_SUCCESS) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001884 auto queue_state = GetQueueState(queue);
locke-lunargd556cc32019-09-17 01:21:23 -06001885
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001886 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, bindInfoCount);
locke-lunargd556cc32019-09-17 01:21:23 -06001887
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001888 for (uint32_t bind_idx = 0; bind_idx < bindInfoCount; ++bind_idx) {
1889 const VkBindSparseInfo &bind_info = pBindInfo[bind_idx];
locke-lunargd556cc32019-09-17 01:21:23 -06001890 // Track objects tied to memory
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001891 for (uint32_t j = 0; j < bind_info.bufferBindCount; j++) {
1892 for (uint32_t k = 0; k < bind_info.pBufferBinds[j].bindCount; k++) {
1893 auto sparse_binding = bind_info.pBufferBinds[j].pBinds[k];
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001894 auto buffer_state = GetBufferState(bind_info.pBufferBinds[j].buffer);
1895 auto mem_state = GetDevMemShared(sparse_binding.memory);
1896 if (buffer_state && mem_state) {
1897 buffer_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
1898 }
locke-lunargd556cc32019-09-17 01:21:23 -06001899 }
1900 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001901 for (uint32_t j = 0; j < bind_info.imageOpaqueBindCount; j++) {
1902 for (uint32_t k = 0; k < bind_info.pImageOpaqueBinds[j].bindCount; k++) {
1903 auto sparse_binding = bind_info.pImageOpaqueBinds[j].pBinds[k];
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001904 auto image_state = GetImageState(bind_info.pImageOpaqueBinds[j].image);
1905 auto mem_state = GetDevMemShared(sparse_binding.memory);
1906 if (image_state && mem_state) {
1907 image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
1908 }
locke-lunargd556cc32019-09-17 01:21:23 -06001909 }
1910 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001911 for (uint32_t j = 0; j < bind_info.imageBindCount; j++) {
1912 for (uint32_t k = 0; k < bind_info.pImageBinds[j].bindCount; k++) {
1913 auto sparse_binding = bind_info.pImageBinds[j].pBinds[k];
locke-lunargd556cc32019-09-17 01:21:23 -06001914 // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
1915 VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001916 auto image_state = GetImageState(bind_info.pImageBinds[j].image);
1917 auto mem_state = GetDevMemShared(sparse_binding.memory);
1918 if (image_state && mem_state) {
1919 image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, size);
1920 }
locke-lunargd556cc32019-09-17 01:21:23 -06001921 }
1922 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001923 CB_SUBMISSION submission;
1924 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001925 for (uint32_t i = 0; i < bind_info.waitSemaphoreCount; ++i) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001926 RecordSubmitWaitSemaphore(submission, queue, bind_info.pWaitSemaphores[i], 0, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001927 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001928 bool retire_early = false;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001929 for (uint32_t i = 0; i < bind_info.signalSemaphoreCount; ++i) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001930 retire_early |= RecordSubmitSignalSemaphore(submission, queue, bind_info.pSignalSemaphores[i], 0, next_seq);
1931 }
1932 // Retire work up until this submit early, since we will not see the wait that corresponds to this signal
1933 if (retire_early) {
1934 early_retire_seq = std::max(early_retire_seq, queue_state->seq + queue_state->submissions.size() + 1);
locke-lunargd556cc32019-09-17 01:21:23 -06001935 }
1936
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001937 submission.fence = bind_idx == (bindInfoCount - 1) ? fence : VK_NULL_HANDLE;
1938 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001939 }
1940
1941 if (early_retire_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001942 RetireWorkOnQueue(queue_state, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001943 }
1944}
1945
1946void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
1947 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
1948 VkResult result) {
1949 if (VK_SUCCESS != result) return;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001950 semaphoreMap[*pSemaphore] = std::make_shared<SEMAPHORE_STATE>(*pSemaphore, LvlFindInChain<VkSemaphoreTypeCreateInfo>(pCreateInfo->pNext));
locke-lunargd556cc32019-09-17 01:21:23 -06001951}
1952
Mike Schuchardt2df08912020-12-15 16:28:09 -08001953void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBits handle_type,
1954 VkSemaphoreImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06001955 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
1956 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001957 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06001958 sema_node->scope == kSyncScopeInternal) {
1959 sema_node->scope = kSyncScopeExternalTemporary;
1960 } else {
1961 sema_node->scope = kSyncScopeExternalPermanent;
1962 }
1963 }
1964}
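
// Note: sync-fd handles are grouped with VK_SEMAPHORE_IMPORT_TEMPORARY_BIT above because the spec
// requires sync-fd semaphore imports to be temporary; a temporary import only lasts until the
// semaphore is waited on, at which point RecordSubmitWaitSemaphore restores kSyncScopeInternal.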
1965
Mike Schuchardt2df08912020-12-15 16:28:09 -08001966void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo,
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001967 VkResult result) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001968 auto *semaphore_state = GetSemaphoreState(pSignalInfo->semaphore);
1969 if (semaphore_state) semaphore_state->payload = pSignalInfo->value;  // guard against an untracked handle
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001970}
1971
locke-lunargd556cc32019-09-17 01:21:23 -06001972void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
1973 auto mem_info = GetDevMemState(mem);
1974 if (mem_info) {
1975 mem_info->mapped_range.offset = offset;
1976 mem_info->mapped_range.size = size;
1977 mem_info->p_driver_data = *ppData;
1978 }
1979}
1980
1981void ValidationStateTracker::RetireFence(VkFence fence) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001982 auto fence_state = GetFenceState(fence);
1983 if (fence_state && fence_state->scope == kSyncScopeInternal) {
1984 if (fence_state->signaler.first != VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06001985 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001986 RetireWorkOnQueue(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001987 } else {
1988 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
1989 // the fence as retired.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001990 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06001991 }
1992 }
1993}
1994
1995void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
1996 VkBool32 waitAll, uint64_t timeout, VkResult result) {
1997 if (VK_SUCCESS != result) return;
1998
1999 // When we know that all fences are complete we can clean/remove their CBs
2000 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2001 for (uint32_t i = 0; i < fenceCount; i++) {
2002 RetireFence(pFences[i]);
2003 }
2004 }
2005 // NOTE: The alternate case, not handled here, is when only some of the fences have completed. In
2006 // that case, for the app to know which fences completed, it will have to call
2007 // vkGetFenceStatus(), at which point we'll clean/remove their CBs if complete.
2008}
2009
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002010void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002011 auto semaphore_state = GetSemaphoreState(semaphore);
2012 if (semaphore_state) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002013 for (auto &pair : queueMap) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002014 QUEUE_STATE &queue_state = pair.second;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002015 uint64_t max_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002016 for (const auto &submission : queue_state.submissions) {
2017 for (const auto &signal_semaphore : submission.signalSemaphores) {
2018 if (signal_semaphore.semaphore == semaphore && signal_semaphore.payload <= until_payload) {
2019 if (signal_semaphore.seq > max_seq) {
2020 max_seq = signal_semaphore.seq;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002021 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002022 }
2023 }
2024 }
Tony-LunarG47d5e272020-04-07 15:35:55 -06002025 if (max_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002026 RetireWorkOnQueue(&queue_state, max_seq);
Tony-LunarG47d5e272020-04-07 15:35:55 -06002027 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002028 }
2029 }
2030}
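
// Note: this walks every queue, finds the highest-sequence submission whose signal of this
// semaphore carries a payload <= until_payload, and retires that queue's work up to that sequence;
// queues that never signal the semaphore keep max_seq == 0 and are left untouched.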
2031
John Zulauff89de662020-04-13 18:57:34 -06002032void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2033 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002034 if (VK_SUCCESS != result) return;
2035
2036 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2037 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2038 }
2039}
2040
John Zulauff89de662020-04-13 18:57:34 -06002041void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2042 VkResult result) {
2043 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2044}
2045
2046void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
2047 uint64_t timeout, VkResult result) {
2048 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2049}
2050
Adrian Coca Lorentec7d76102020-09-28 13:58:16 +02002051void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2052 VkResult result) {
2053 if (VK_SUCCESS != result) return;
2054
2055 RetireTimelineSemaphore(semaphore, *pValue);
2056}
2057
2058void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2059 VkResult result) {
2060 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2061}

2062void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2063 VkResult result) {
2064 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2065}
2066
locke-lunargd556cc32019-09-17 01:21:23 -06002067void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2068 if (VK_SUCCESS != result) return;
2069 RetireFence(fence);
2070}
2071
2072void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
Jeremy Gebben5573a8c2021-06-04 08:55:10 -06002073 queueMap.emplace(queue, QUEUE_STATE(queue, queue_family_index));
locke-lunargd556cc32019-09-17 01:21:23 -06002074}
2075
2076void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
2077 VkQueue *pQueue) {
2078 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
2079}
2080
2081void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
2082 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
2083}
2084
2085void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2086 if (VK_SUCCESS != result) return;
2087 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002088 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002089}
2090
2091void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2092 if (VK_SUCCESS != result) return;
2093 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002094 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002095 }
2096}
2097
2098void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2099 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002100 auto fence_state = GetFenceState(fence);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002101 fence_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002102 fenceMap.erase(fence);
2103}
2104
2105void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2106 const VkAllocationCallbacks *pAllocator) {
2107 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002108 auto semaphore_state = GetSemaphoreState(semaphore);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002109 semaphore_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002110 semaphoreMap.erase(semaphore);
2111}
2112
2113void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2114 if (!event) return;
John Zulauf48057322020-12-02 11:59:31 -07002115 EVENT_STATE *event_state = Get<EVENT_STATE>(event);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002116 event_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002117 eventMap.erase(event);
2118}
2119
2120void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2121 const VkAllocationCallbacks *pAllocator) {
2122 if (!queryPool) return;
2123 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002124 qp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002125 queryPoolMap.erase(queryPool);
2126}
2127
locke-lunargd556cc32019-09-17 01:21:23 -06002128void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2129 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2130 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002131 // Track objects tied to memory
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002132 auto mem_state = GetDevMemShared(mem);
2133 if (mem_state) {
2134 buffer_state->SetMemBinding(mem_state, memoryOffset);
2135 }
locke-lunargd556cc32019-09-17 01:21:23 -06002136 }
2137}
2138
2139void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
2140 VkDeviceSize memoryOffset, VkResult result) {
2141 if (VK_SUCCESS != result) return;
2142 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
2143}
2144
2145void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002146 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002147 for (uint32_t i = 0; i < bindInfoCount; i++) {
2148 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2149 }
2150}
2151
2152void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002153 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002154 for (uint32_t i = 0; i < bindInfoCount; i++) {
2155 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2156 }
2157}
2158
Spencer Fricke6c127102020-04-16 06:25:20 -07002159void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002160 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2161 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002162 buffer_state->memory_requirements_checked = true;
2163 }
2164}
2165
2166void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
2167 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002168 RecordGetBufferMemoryRequirementsState(buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002169}
2170
2171void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002172 const VkBufferMemoryRequirementsInfo2 *pInfo,
2173 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002174 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002175}
2176
2177void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002178 const VkBufferMemoryRequirementsInfo2 *pInfo,
2179 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002180 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002181}
2182
Spencer Fricke6c127102020-04-16 06:25:20 -07002183void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002184 const VkImagePlaneMemoryRequirementsInfo *plane_info =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002185 (pInfo == nullptr) ? nullptr : LvlFindInChain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002186 IMAGE_STATE *image_state = GetImageState(image);
2187 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002188 if (plane_info != nullptr) {
2189 // Multi-plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002190 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06002191 image_state->memory_requirements_checked[0] = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002192 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06002193 image_state->memory_requirements_checked[1] = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002194 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06002195 image_state->memory_requirements_checked[2] = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002196 }
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06002197 } else if (!image_state->disjoint) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002198 // Single Plane image
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06002199 image_state->memory_requirements_checked[0] = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002200 }
locke-lunargd556cc32019-09-17 01:21:23 -06002201 }
2202}
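
// Illustrative sketch (not part of the tracker): the per-plane branch above corresponds to an
// application query such as the following for a disjoint multi-planar image (image and device are
// hypothetical handles):
//
//   VkImagePlaneMemoryRequirementsInfo plane_info = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
//   plane_info.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
//   VkImageMemoryRequirementsInfo2 info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, &plane_info};
//   info.image = image;
//   VkMemoryRequirements2 reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
//   vkGetImageMemoryRequirements2(device, &info, &reqs);  // marks memory_requirements_checked[0]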
2203
2204void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
2205 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002206 RecordGetImageMemoryRequirementsState(image, nullptr);
locke-lunargd556cc32019-09-17 01:21:23 -06002207}
2208
2209void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2210 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002211 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002212}
2213
2214void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
2215 const VkImageMemoryRequirementsInfo2 *pInfo,
2216 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002217 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002218}
2219
2220static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2221 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2222 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2223 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2224 image_state->sparse_metadata_required = true;
2225 }
2226}
2227
2228void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2229 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2230 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2231 auto image_state = GetImageState(image);
2232 image_state->get_sparse_reqs_called = true;
2233 if (!pSparseMemoryRequirements) return;
2234 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2235 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2236 }
2237}
2238
2239void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002240 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2241 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002242 auto image_state = GetImageState(pInfo->image);
2243 image_state->get_sparse_reqs_called = true;
2244 if (!pSparseMemoryRequirements) return;
2245 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2246 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2247 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2248 }
2249}
2250
2251void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002252 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2253 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002254 auto image_state = GetImageState(pInfo->image);
2255 image_state->get_sparse_reqs_called = true;
2256 if (!pSparseMemoryRequirements) return;
2257 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2258 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2259 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2260 }
2261}
2262
2263void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2264 const VkAllocationCallbacks *pAllocator) {
2265 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002266 auto shader_module_state = GetShaderModuleState(shaderModule);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002267 shader_module_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002268 shaderModuleMap.erase(shaderModule);
2269}
2270
2271void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2272 const VkAllocationCallbacks *pAllocator) {
2273 if (!pipeline) return;
2274 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
locke-lunargd556cc32019-09-17 01:21:23 -06002275 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002276 pipeline_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002277 pipelineMap.erase(pipeline);
2278}
2279
2280void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2281 const VkAllocationCallbacks *pAllocator) {
2282 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002283 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002284 pipeline_layout_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002285 pipelineLayoutMap.erase(pipelineLayout);
2286}
2287
2288void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2289 const VkAllocationCallbacks *pAllocator) {
2290 if (!sampler) return;
2291 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
locke-lunargd556cc32019-09-17 01:21:23 -06002292 // Any bound cmd buffers are now invalid
2293 if (sampler_state) {
Yuly Novikov424cdd52020-05-26 16:45:12 -04002294 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2295 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
2296 custom_border_color_sampler_count--;
2297 }
2298
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002299 sampler_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002300 }
2301 samplerMap.erase(sampler);
2302}
2303
2304void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2305 const VkAllocationCallbacks *pAllocator) {
2306 if (!descriptorSetLayout) return;
2307 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2308 if (layout_it != descriptorSetLayoutMap.end()) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002309 layout_it->second.get()->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002310 descriptorSetLayoutMap.erase(layout_it);
2311 }
2312}
2313
2314void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2315 const VkAllocationCallbacks *pAllocator) {
2316 if (!descriptorPool) return;
2317 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002318 if (desc_pool_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002319 // Free sets that were in this pool
John Zulauf79f06582021-02-27 18:38:39 -07002320 for (auto *ds : desc_pool_state->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002321 FreeDescriptorSet(ds);
2322 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002323 desc_pool_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002324 descriptorPoolMap.erase(descriptorPool);
2325 }
2326}
2327
2328// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
2329void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
2330 const VkCommandBuffer *command_buffers) {
2331 for (uint32_t i = 0; i < command_buffer_count; i++) {
John Zulaufd1f85d42020-04-15 12:23:15 -06002332 // Allow any derived class to clean up command buffer state
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002333 if (command_buffer_reset_callback) {
2334 (*command_buffer_reset_callback)(command_buffers[i]);
2335 }
John Zulaufd1f85d42020-04-15 12:23:15 -06002336 if (command_buffer_free_callback) {
2337 (*command_buffer_free_callback)(command_buffers[i]);
2338 }
2339
locke-lunargd556cc32019-09-17 01:21:23 -06002340 auto cb_state = GetCBState(command_buffers[i]);
2341 // Remove references to command buffer's state and delete
2342 if (cb_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002343 // Remove the cb_state's references from COMMAND_POOL_STATEs
2344 pool_state->commandBuffers.erase(command_buffers[i]);
2345 // Remove the cb debug labels
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06002346 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
locke-lunargd556cc32019-09-17 01:21:23 -06002347 // Remove CBState from CB map
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002348 cb_state->Destroy();
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06002349 commandBufferMap.erase(cb_state->commandBuffer());
locke-lunargd556cc32019-09-17 01:21:23 -06002350 }
2351 }
2352}
2353
2354void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2355 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002356 auto pool = GetCommandPoolState(commandPool);
2357 FreeCommandBufferStates(pool, commandBufferCount, pCommandBuffers);
locke-lunargd556cc32019-09-17 01:21:23 -06002358}
2359
2360void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2361 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2362 VkResult result) {
2363 if (VK_SUCCESS != result) return;
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06002364 auto queue_flags = GetPhysicalDeviceState()->queue_family_properties[pCreateInfo->queueFamilyIndex].queueFlags;
2365 commandPoolMap[*pCommandPool] = std::make_shared<COMMAND_POOL_STATE>(*pCommandPool, pCreateInfo, queue_flags);
locke-lunargd556cc32019-09-17 01:21:23 -06002366}
2367
2368void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2369 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2370 VkResult result) {
2371 if (VK_SUCCESS != result) return;
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002372
2373 uint32_t index_count = 0, n_perf_pass = 0;
2374 bool has_cb = false, has_rb = false;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002375 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002376 const auto *perf = LvlFindInChain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002377 index_count = perf->counterIndexCount;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002378
Mark Lobodzinski7e948e42020-09-09 10:23:36 -06002379 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002380 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2381 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2382 switch (counter.scope) {
2383 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002384 has_cb = true;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002385 break;
2386 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002387 has_rb = true;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002388 break;
2389 default:
2390 break;
2391 }
2392 }
2393
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002394 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf, &n_perf_pass);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002395 }
2396
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002397 queryPoolMap[*pQueryPool] =
2398 std::make_shared<QUERY_POOL_STATE>(*pQueryPool, pCreateInfo, index_count, n_perf_pass, has_cb, has_rb);
locke-lunargd556cc32019-09-17 01:21:23 -06002399
2400 QueryObject query_obj{*pQueryPool, 0u};
2401 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2402 query_obj.query = i;
2403 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2404 }
2405}
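
// Illustrative sketch (not part of the tracker): the performance-query branch above is taken for a
// pool created roughly like this (the counter indices and queue family index are hypothetical):
//
//   uint32_t counter_indices[] = {0, 3};
//   VkQueryPoolPerformanceCreateInfoKHR perf_info = {VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR};
//   perf_info.queueFamilyIndex = 0;
//   perf_info.counterIndexCount = 2;
//   perf_info.pCounterIndices = counter_indices;
//   VkQueryPoolCreateInfo pool_info = {VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, &perf_info};
//   pool_info.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
//   pool_info.queryCount = 1;
//   vkCreateQueryPool(device, &pool_info, nullptr, &pool);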
2406
2407void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2408 const VkAllocationCallbacks *pAllocator) {
2409 if (!commandPool) return;
2410 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2411 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2412 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2413 if (cp_state) {
2414 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2415 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2416 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002417 cp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002418 commandPoolMap.erase(commandPool);
2419 }
2420}
2421
2422void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2423 VkCommandPoolResetFlags flags, VkResult result) {
2424 if (VK_SUCCESS != result) return;
2425 // Reset all of the CBs allocated from this pool
2426 auto command_pool_state = GetCommandPoolState(commandPool);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002427 for (auto cmd_buffer : command_pool_state->commandBuffers) {
2428 ResetCommandBufferState(cmd_buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002429 }
2430}
2431
2432void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2433 VkResult result) {
2434 for (uint32_t i = 0; i < fenceCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002435 auto fence_state = GetFenceState(pFences[i]);
2436 if (fence_state) {
2437 if (fence_state->scope == kSyncScopeInternal) {
2438 fence_state->state = FENCE_UNSIGNALED;
2439 } else if (fence_state->scope == kSyncScopeExternalTemporary) {
2440 fence_state->scope = kSyncScopeInternal;
locke-lunargd556cc32019-09-17 01:21:23 -06002441 }
2442 }
2443 }
2444}
2445
locke-lunargd556cc32019-09-17 01:21:23 -06002446void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2447 const VkAllocationCallbacks *pAllocator) {
2448 if (!framebuffer) return;
2449 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002450 framebuffer_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002451 frameBufferMap.erase(framebuffer);
2452}
2453
2454void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2455 const VkAllocationCallbacks *pAllocator) {
2456 if (!renderPass) return;
2457 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002458 rp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002459 renderPassMap.erase(renderPass);
2460}
2461
2462void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2463 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2464 if (VK_SUCCESS != result) return;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002465 fenceMap[*pFence] = std::make_shared<FENCE_STATE>(*pFence, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002466}
2467
2468bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2469 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2470 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002471 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002472 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2473 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2474 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2475 cgpl_state->pipe_state.reserve(count);
2476 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002477 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002478 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002479 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002480 }
2481 return false;
2482}
2483
2484void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2485 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2486 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2487 VkResult result, void *cgpl_state_data) {
2488 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2489 // This API may create pipelines regardless of the return value
2490 for (uint32_t i = 0; i < count; i++) {
2491 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002492 (cgpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002493 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2494 }
2495 }
2496 cgpl_state->pipe_state.clear();
2497}
2498
2499bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2500 const VkComputePipelineCreateInfo *pCreateInfos,
2501 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002502 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002503 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2504 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2505 ccpl_state->pipe_state.reserve(count);
2506 for (uint32_t i = 0; i < count; i++) {
2507 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002508 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002509 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002510 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002511 }
2512 return false;
2513}
2514
2515void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2516 const VkComputePipelineCreateInfo *pCreateInfos,
2517 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2518 VkResult result, void *ccpl_state_data) {
2519 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2520
2521 // This API may create pipelines regardless of the return value
2522 for (uint32_t i = 0; i < count; i++) {
2523 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002524 (ccpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002525 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2526 }
2527 }
2528 ccpl_state->pipe_state.clear();
2529}
2530
2531bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2532 uint32_t count,
2533 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2534 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002535 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002536 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2537 crtpl_state->pipe_state.reserve(count);
2538 for (uint32_t i = 0; i < count; i++) {
2539 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002540 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002541 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002542 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002543 }
2544 return false;
2545}
2546
2547void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2548 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2549 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2550 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2551 // This API may create pipelines regardless of the return value
2552 for (uint32_t i = 0; i < count; i++) {
2553 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002554 (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002555 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2556 }
2557 }
2558 crtpl_state->pipe_state.clear();
2559}
2560
sourav parmarcd5fb182020-07-17 12:58:44 -07002561bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
2562 VkPipelineCache pipelineCache, uint32_t count,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002563 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2564 const VkAllocationCallbacks *pAllocator,
2565 VkPipeline *pPipelines, void *crtpl_state_data) const {
2566 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2567 crtpl_state->pipe_state.reserve(count);
2568 for (uint32_t i = 0; i < count; i++) {
2569 // Create and initialize internal tracking data structure
2570 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
2571 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
2572 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
2573 }
2574 return false;
2575}
2576
sourav parmarcd5fb182020-07-17 12:58:44 -07002577void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
2578 VkPipelineCache pipelineCache, uint32_t count,
2579 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2580 const VkAllocationCallbacks *pAllocator,
2581 VkPipeline *pPipelines, VkResult result,
2582 void *crtpl_state_data) {
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002583 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2584 // This API may create pipelines regardless of the return value
2585 for (uint32_t i = 0; i < count; i++) {
2586 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002587 (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002588 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2589 }
2590 }
2591 crtpl_state->pipe_state.clear();
2592}
2593
locke-lunargd556cc32019-09-17 01:21:23 -06002594void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2595 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2596 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002597 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002598 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2599 pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
Tony-LunarG7337b312020-04-15 16:40:25 -06002600 custom_border_color_sampler_count++;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002601 }
locke-lunargd556cc32019-09-17 01:21:23 -06002602}
2603
2604void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2605 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2606 const VkAllocationCallbacks *pAllocator,
2607 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2608 if (VK_SUCCESS != result) return;
2609 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2610}
2611
2612// For repeatable sorting, not very useful for "memory in range" search
2613struct PushConstantRangeCompare {
2614 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2615 if (lhs->offset == rhs->offset) {
2616 if (lhs->size == rhs->size) {
2617 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2618 return lhs->stageFlags < rhs->stageFlags;
2619 }
2620 // If the offsets are the same then sorting by the end of range is useful for validation
2621 return lhs->size < rhs->size;
2622 }
2623 return lhs->offset < rhs->offset;
2624 }
2625};
2626
2627static PushConstantRangesDict push_constant_ranges_dict;
2628
2629PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2630 if (!info->pPushConstantRanges) {
2631 // Hand back the empty entry (creating as needed)...
2632 return push_constant_ranges_dict.look_up(PushConstantRanges());
2633 }
2634
2635 // Sort the input ranges to ensure equivalent ranges map to the same id
2636 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2637 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2638 sorted.insert(info->pPushConstantRanges + i);
2639 }
2640
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07002641 PushConstantRanges ranges;
2642 ranges.reserve(sorted.size());
John Zulauf79f06582021-02-27 18:38:39 -07002643 for (const auto *range : sorted) {
locke-lunargd556cc32019-09-17 01:21:23 -06002644 ranges.emplace_back(*range);
2645 }
2646 return push_constant_ranges_dict.look_up(std::move(ranges));
2647}
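
// Note: because the ranges are sorted (by offset, then size, then stageFlags) before the dictionary
// lookup, two layouts that declare the same push constant ranges in a different order resolve to
// the same PushConstantRangesId, so they compare as compatible below.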
2648
2649 // Dictionary of canonical form of the pipeline layout's set of descriptor set layouts
2650static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2651
2652// Dictionary of canonical form of the "compatible for set" records
2653static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2654
2655static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2656 const PipelineLayoutSetLayoutsId set_layouts_id) {
2657 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2658}
2659
2660void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2661 const VkAllocationCallbacks *pAllocator,
2662 VkPipelineLayout *pPipelineLayout, VkResult result) {
2663 if (VK_SUCCESS != result) return;
2664
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002665 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>(*pPipelineLayout);
locke-lunargd556cc32019-09-17 01:21:23 -06002666 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2667 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2668 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002669 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002670 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2671 }
2672
2673 // Get canonical form IDs for the "compatible for set" contents
2674 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2675 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2676 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2677
2678 // Create table of "compatible for set N" canonical forms for trivial accept validation
2679 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2680 pipeline_layout_state->compat_for_set.emplace_back(
2681 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2682 }
2683 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2684}
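
// Note: compat_for_set[N] is the canonical "compatible for set N" record; two pipeline layouts that
// share the first N+1 descriptor set layouts and identical push constant ranges receive the same
// id, which lets later binding validation accept the common case with a single id comparison.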
2685
2686void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2687 const VkAllocationCallbacks *pAllocator,
2688 VkDescriptorPool *pDescriptorPool, VkResult result) {
2689 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002690 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002691}
2692
2693void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2694 VkDescriptorPoolResetFlags flags, VkResult result) {
2695 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002696 DESCRIPTOR_POOL_STATE *pool = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002697 // TODO: validate flags
2698 // For every set allocated from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
John Zulauf79f06582021-02-27 18:38:39 -07002699 for (auto *ds : pool->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002700 FreeDescriptorSet(ds);
2701 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002702 pool->sets.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06002703 // Reset available count for each type and available sets for this pool
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002704 for (auto it = pool->availableDescriptorTypeCount.begin(); it != pool->availableDescriptorTypeCount.end(); ++it) {
2705 pool->availableDescriptorTypeCount[it->first] = pool->maxDescriptorTypeCount[it->first];
locke-lunargd556cc32019-09-17 01:21:23 -06002706 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002707 pool->availableSets = pool->maxSets;
locke-lunargd556cc32019-09-17 01:21:23 -06002708}
2709
2710bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2711 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002712 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002713 // Always update common data
2714 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2715 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2716 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2717
2718 return false;
2719}
2720
2721// Allocation succeeded and the call was passed down the chain, so update state for the newly allocated descriptor sets
2722void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2723 VkDescriptorSet *pDescriptorSets, VkResult result,
2724 void *ads_state_data) {
2725 if (VK_SUCCESS != result) return;
2726 // All the updates are contained in a single cvdescriptorset function
2727 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2728 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2729 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2730}
2731
2732void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2733 const VkDescriptorSet *pDescriptorSets) {
2734 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2735 // Update available descriptor sets in pool
2736 pool_state->availableSets += count;
2737
2738 // For each freed descriptor set, add its resources back into the pool as available and remove it from the pool and setMap
2739 for (uint32_t i = 0; i < count; ++i) {
2740 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2741 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2742 uint32_t type_index = 0, descriptor_count = 0;
2743 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2744 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2745 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2746 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2747 }
2748 FreeDescriptorSet(descriptor_set);
2749 pool_state->sets.erase(descriptor_set);
2750 }
2751 }
2752}
2753
2754void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2755 const VkWriteDescriptorSet *pDescriptorWrites,
2756 uint32_t descriptorCopyCount,
2757 const VkCopyDescriptorSet *pDescriptorCopies) {
2758 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2759 pDescriptorCopies);
2760}
2761
2762void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2763 VkCommandBuffer *pCommandBuffer, VkResult result) {
2764 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002765 auto pool = GetCommandPoolShared(pCreateInfo->commandPool);
2766 if (pool) {
locke-lunargd556cc32019-09-17 01:21:23 -06002767 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2768 // Add command buffer to its commandPool map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002769 pool->commandBuffers.insert(pCommandBuffer[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002770 auto cb_state = std::make_shared<CMD_BUFFER_STATE>(pCommandBuffer[i], pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002771 cb_state->command_pool = pool;
2772 cb_state->unprotected = pool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06002773 // Add command buffer to map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002774 commandBufferMap[pCommandBuffer[i]] = std::move(cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002775 ResetCommandBufferState(pCommandBuffer[i]);
2776 }
2777 }
2778}
2779
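// Record, for each framebuffer attachment referenced by this subpass, how the subpass uses it (input, color,
// resolve, or depth/stencil) and the expected image layout; VK_ATTACHMENT_UNUSED references are skipped.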
locke-lunargfc78e932020-11-19 17:06:24 -07002780void UpdateSubpassAttachments(const safe_VkSubpassDescription2 &subpass, std::vector<SUBPASS_INFO> &subpasses) {
2781 for (uint32_t index = 0; index < subpass.inputAttachmentCount; ++index) {
2782 const uint32_t attachment_index = subpass.pInputAttachments[index].attachment;
2783 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2784 subpasses[attachment_index].used = true;
2785 subpasses[attachment_index].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
2786 subpasses[attachment_index].layout = subpass.pInputAttachments[index].layout;
2787 }
2788 }
2789
2790 for (uint32_t index = 0; index < subpass.colorAttachmentCount; ++index) {
2791 const uint32_t attachment_index = subpass.pColorAttachments[index].attachment;
2792 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2793 subpasses[attachment_index].used = true;
2794 subpasses[attachment_index].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2795 subpasses[attachment_index].layout = subpass.pColorAttachments[index].layout;
2796 }
2797 if (subpass.pResolveAttachments) {
2798 const uint32_t attachment_index2 = subpass.pResolveAttachments[index].attachment;
2799 if (attachment_index2 != VK_ATTACHMENT_UNUSED) {
2800 subpasses[attachment_index2].used = true;
2801 subpasses[attachment_index2].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2802 subpasses[attachment_index2].layout = subpass.pResolveAttachments[index].layout;
2803 }
2804 }
2805 }
2806
2807 if (subpass.pDepthStencilAttachment) {
2808 const uint32_t attachment_index = subpass.pDepthStencilAttachment->attachment;
2809 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2810 subpasses[attachment_index].used = true;
2811 subpasses[attachment_index].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
2812 subpasses[attachment_index].layout = subpass.pDepthStencilAttachment->layout;
2813 }
2814 }
2815}
2816
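// Resolve the command buffer's active attachment list to IMAGE_VIEW_STATE pointers. For imageless framebuffers
// the views come from VkRenderPassAttachmentBeginInfo chained to the render pass begin info; otherwise they come
// from the framebuffer's own attachment list.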
2817void UpdateAttachmentsView(ValidationStateTracker &tracker, CMD_BUFFER_STATE &cb_state, const FRAMEBUFFER_STATE &framebuffer,
2818 const VkRenderPassBeginInfo *pRenderPassBegin) {
2819 auto &attachments = *(cb_state.active_attachments.get());
2820 const bool imageless = (framebuffer.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) ? true : false;
2821 const VkRenderPassAttachmentBeginInfo *attachment_info_struct = nullptr;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002822 if (pRenderPassBegin) attachment_info_struct = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
locke-lunargfc78e932020-11-19 17:06:24 -07002823
2824 for (uint32_t i = 0; i < attachments.size(); ++i) {
2825 if (imageless) {
2826 if (attachment_info_struct && i < attachment_info_struct->attachmentCount) {
2827 auto res = cb_state.attachments_view_states.insert(
2828 tracker.GetShared<IMAGE_VIEW_STATE>(attachment_info_struct->pAttachments[i]));
2829 attachments[i] = res.first->get();
2830 }
2831 } else {
2832 auto res = cb_state.attachments_view_states.insert(framebuffer.attachments_view_state[i]);
2833 attachments[i] = res.first->get();
2834 }
2835 }
2836}
2837
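// Begin recording: perform an implicit reset if the command buffer was previously recorded, and for secondary
// command buffers that continue a render pass, capture the inherited render pass, subpass, framebuffer, and any
// VK_NV_inherited_viewport_scissor depth ranges. The initial device mask defaults to all physical devices unless
// a VkDeviceGroupCommandBufferBeginInfo is chained.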
locke-lunargd556cc32019-09-17 01:21:23 -06002838void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
2839 const VkCommandBufferBeginInfo *pBeginInfo) {
2840 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2841 if (!cb_state) return;
locke-lunargfc78e932020-11-19 17:06:24 -07002842
locke-lunargd556cc32019-09-17 01:21:23 -06002843 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
2844 ResetCommandBufferState(commandBuffer);
2845 }
2846 // Set updated state here in case implicit reset occurs above
2847 cb_state->state = CB_RECORDING;
2848 cb_state->beginInfo = *pBeginInfo;
Tony-LunarG3c287f62020-12-17 12:39:49 -07002849 if (cb_state->beginInfo.pInheritanceInfo && (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
locke-lunargd556cc32019-09-17 01:21:23 -06002850 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
2851 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
2852 // If this is a secondary command buffer that inherits render pass state, update the inherited items.
2853 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
2854 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
locke-lunargaecf2152020-05-12 17:15:41 -06002855 cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
locke-lunargd556cc32019-09-17 01:21:23 -06002856 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
locke-lunargfc78e932020-11-19 17:06:24 -07002857
locke-lunargaecf2152020-05-12 17:15:41 -06002858 if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
2859 cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
locke-lunargfc78e932020-11-19 17:06:24 -07002860 cb_state->active_subpasses = nullptr;
2861 cb_state->active_attachments = nullptr;
2862
2863 if (cb_state->activeFramebuffer) {
2864 cb_state->framebuffers.insert(cb_state->activeFramebuffer);
2865
2866 // Set cb_state->active_subpasses
2867 cb_state->active_subpasses =
2868 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
2869 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
2870 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
2871
2872 // Set cb_state->active_attachments & cb_state->attachments_view_states
2873 cb_state->active_attachments =
2874 std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
2875 UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, nullptr);
2876
2877 // Connect this framebuffer and its children to this cmdBuffer
Jeremy Gebben5570abe2021-05-16 18:35:13 -06002878 if (!disabled[command_buffer_state]) {
2879 cb_state->AddChild(cb_state->activeFramebuffer.get());
2880 }
locke-lunargfc78e932020-11-19 17:06:24 -07002881 }
locke-lunargaecf2152020-05-12 17:15:41 -06002882 }
David Zhao Akeley44139b12021-04-26 16:16:13 -07002883
2884 // Check for VkCommandBufferInheritanceViewportScissorInfoNV (VK_NV_inherited_viewport_scissor)
2885 auto p_inherited_viewport_scissor_info =
2886 LvlFindInChain<VkCommandBufferInheritanceViewportScissorInfoNV>(cb_state->beginInfo.pInheritanceInfo->pNext);
2887 if (p_inherited_viewport_scissor_info != nullptr && p_inherited_viewport_scissor_info->viewportScissor2D) {
2888 auto pViewportDepths = p_inherited_viewport_scissor_info->pViewportDepths;
2889 cb_state->inheritedViewportDepths.assign(
2890 pViewportDepths, pViewportDepths + p_inherited_viewport_scissor_info->viewportDepthCount);
2891 }
locke-lunargd556cc32019-09-17 01:21:23 -06002892 }
2893 }
2894
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002895 auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002896 if (chained_device_group_struct) {
2897 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
2898 } else {
2899 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
2900 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002901
2902 cb_state->performance_lock_acquired = performance_lock_acquired;
locke-lunargd556cc32019-09-17 01:21:23 -06002903}
2904
2905void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2906 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2907 if (!cb_state) return;
2908 // Cached validation applies only to this particular recording of this command buffer.
John Zulauf79f06582021-02-27 18:38:39 -07002909 for (auto *descriptor_set : cb_state->validated_descriptor_sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002910 descriptor_set->ClearCachedValidation(cb_state);
2911 }
2912 cb_state->validated_descriptor_sets.clear();
2913 if (VK_SUCCESS == result) {
2914 cb_state->state = CB_RECORDED;
2915 }
2916}
2917
2918void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
2919 VkResult result) {
2920 if (VK_SUCCESS == result) {
2921 ResetCommandBufferState(commandBuffer);
2922 }
2923}
2924
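// Compute which pieces of state a pipeline treats as static: start from "everything static" and clear one bit
// per entry in pDynamicState, so the bits that remain are baked into the pipeline.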
2925CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
2926 // initially assume everything is static state
2927 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
2928
2929 if (ds) {
2930 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
locke-lunarg4189aa22020-10-21 00:23:48 -06002931 flags &= ~ConvertToCBStatusFlagBits(ds->pDynamicStates[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002932 }
2933 }
locke-lunargd556cc32019-09-17 01:21:23 -06002934 return flags;
2935}
2936
2937// Validation cache:
2938// CV is the bottommost implementor of this extension. Don't pass calls down.
2939// utility function to set collective state for pipeline
2940void SetPipelineState(PIPELINE_STATE *pPipe) {
2941 // If any enabled attachment uses a constant blend factor, record that blend constants affect this pipeline
2942 if (pPipe->graphicsPipelineCI.pColorBlendState) {
2943 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
2944 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
2945 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2946 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2947 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2948 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2949 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2950 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2951 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2952 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
2953 pPipe->blendConstantsEnabled = true;
2954 }
2955 }
2956 }
2957 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07002958 // Check if sample location is enabled
2959 if (pPipe->graphicsPipelineCI.pMultisampleState) {
2960 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002961 LvlFindInChain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
sfricke-samsung8f658d42020-05-03 20:12:24 -07002962 if (sample_location_state != nullptr) {
2963 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
2964 }
2965 }
locke-lunargd556cc32019-09-17 01:21:23 -06002966}
2967
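// Binding a pipeline re-derives the command buffer's static/dynamic state masks and, for graphics pipelines,
// records the pipeline's static viewport/scissor counts and marks statically owned viewport/scissor entries as
// trashed for VK_NV_inherited_viewport_scissor tracking.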
2968void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
2969 VkPipeline pipeline) {
2970 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2971 assert(cb_state);
2972
2973 auto pipe_state = GetPipelineState(pipeline);
2974 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002975 bool rasterization_enabled = VK_FALSE == pipe_state->graphicsPipelineCI.ptr()->pRasterizationState->rasterizerDiscardEnable;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002976 const auto* viewport_state = pipe_state->graphicsPipelineCI.ptr()->pViewportState;
2977 const auto* dynamic_state = pipe_state->graphicsPipelineCI.ptr()->pDynamicState;
locke-lunargd556cc32019-09-17 01:21:23 -06002978 cb_state->status &= ~cb_state->static_status;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002979 cb_state->static_status = MakeStaticStateMask(dynamic_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002980 cb_state->status |= cb_state->static_status;
locke-lunarg4189aa22020-10-21 00:23:48 -06002981 cb_state->dynamic_status = CBSTATUS_ALL_STATE_SET & (~cb_state->static_status);
David Zhao Akeley44139b12021-04-26 16:16:13 -07002982
2983 // Used to calculate CMD_BUFFER_STATE::usedViewportScissorCount upon draw command with this graphics pipeline.
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002984 // If rasterization is disabled (no viewports/scissors used), or the actual number of viewports/scissors is dynamic (unknown at
2985 // this time), these counts are set to 0 to disable the check.
David Zhao Akeley44139b12021-04-26 16:16:13 -07002986 auto has_dynamic_viewport_count = cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002987 auto has_dynamic_scissor_count = cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002988 cb_state->pipelineStaticViewportCount =
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002989 has_dynamic_viewport_count || !rasterization_enabled ? 0 : viewport_state->viewportCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002990 cb_state->pipelineStaticScissorCount =
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002991 has_dynamic_scissor_count || !rasterization_enabled ? 0 : viewport_state->scissorCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002992
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002993 // Trash dynamic viewport/scissor state if the pipeline defines static state and rasterization is enabled.
David Zhao Akeley44139b12021-04-26 16:16:13 -07002994 // akeley98 NOTE: There's a bit of an ambiguity in the spec, whether binding such a pipeline overwrites
2995 // the entire viewport (scissor) array, or only the subsection defined by the viewport (scissor) count.
2996 // I am taking the latter interpretation based on the implementation details of NVIDIA's Vulkan driver.
David Zhao Akeley44139b12021-04-26 16:16:13 -07002997 if (!has_dynamic_viewport_count) {
2998 cb_state->trashedViewportCount = true;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002999 if (rasterization_enabled && (cb_state->static_status & CBSTATUS_VIEWPORT_SET)) {
David Zhao Akeley44139b12021-04-26 16:16:13 -07003000 cb_state->trashedViewportMask |= (uint32_t(1) << viewport_state->viewportCount) - 1u;
3001 // should become = ~uint32_t(0) if the other interpretation is correct.
3002 }
3003 }
3004 if (!has_dynamic_scissor_count) {
3005 cb_state->trashedScissorCount = true;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003006 if (rasterization_enabled && (cb_state->static_status & CBSTATUS_SCISSOR_SET)) {
David Zhao Akeley44139b12021-04-26 16:16:13 -07003007 cb_state->trashedScissorMask |= (uint32_t(1) << viewport_state->scissorCount) - 1u;
3008 // should become = ~uint32_t(0) if the other interpretation is correct.
3009 }
3010 }
locke-lunargd556cc32019-09-17 01:21:23 -06003011 }
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003012 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3013 cb_state->lastBound[lv_bind_point].pipeline_state = pipe_state;
locke-lunargd556cc32019-09-17 01:21:23 -06003014 SetPipelineState(pipe_state);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003015 if (!disabled[command_buffer_state]) {
3016 cb_state->AddChild(pipe_state);
3017 }
locke-lunargb8be8222020-10-20 00:34:37 -06003018 for (auto &slot : pipe_state->active_slots) {
3019 for (auto &req : slot.second) {
3020 for (auto &sampler : req.second.samplers_used_by_image) {
3021 for (auto &des : sampler) {
3022 des.second = nullptr;
3023 }
3024 }
3025 }
3026 }
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06003027 cb_state->lastBound[lv_bind_point].UpdateSamplerDescriptorsUsedByImage();
locke-lunargd556cc32019-09-17 01:21:23 -06003028}
3029
3030void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3031 uint32_t viewportCount, const VkViewport *pViewports) {
3032 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003033 uint32_t bits = ((1u << viewportCount) - 1u) << firstViewport;
3034 cb_state->viewportMask |= bits;
3035 cb_state->trashedViewportMask &= ~bits;
locke-lunargd556cc32019-09-17 01:21:23 -06003036 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003037 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003038
3039 cb_state->dynamicViewports.resize(std::max(size_t(firstViewport + viewportCount), cb_state->dynamicViewports.size()));
3040 for (size_t i = 0; i < viewportCount; ++i) {
3041 cb_state->dynamicViewports[firstViewport + i] = pViewports[i];
3042 }
locke-lunargd556cc32019-09-17 01:21:23 -06003043}
3044
3045void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3046 uint32_t exclusiveScissorCount,
3047 const VkRect2D *pExclusiveScissors) {
3048 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3049 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3050 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3051 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003052 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003053}
3054
3055void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3056 VkImageLayout imageLayout) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003057 if (disabled[command_buffer_state]) return;
3058
locke-lunargd556cc32019-09-17 01:21:23 -06003059 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3060
3061 if (imageView != VK_NULL_HANDLE) {
3062 auto view_state = GetImageViewState(imageView);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003063 cb_state->AddChild(view_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003064 }
3065}
3066
3067void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3068 uint32_t viewportCount,
3069 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3070 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3071 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3072 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3073 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003074 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003075}
3076
3077void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3078 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3079 const VkAllocationCallbacks *pAllocator,
3080 VkAccelerationStructureNV *pAccelerationStructure,
3081 VkResult result) {
3082 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003083 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003084
3085 // Query the requirements in case the application doesn't (to avoid querying at bind/validation time)
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003086 auto as_memory_requirements_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003087 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003088 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003089 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3090
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003091 auto scratch_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003092 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003093 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003094 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3095 &as_state->build_scratch_memory_requirements);
3096
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003097 auto update_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003098 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003099 update_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003100 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3101 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003102 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003103 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3104}
3105
Jeff Bolz95176d02020-04-01 00:36:16 -05003106void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3107 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3108 const VkAllocationCallbacks *pAllocator,
3109 VkAccelerationStructureKHR *pAccelerationStructure,
3110 VkResult result) {
3111 if (VK_SUCCESS != result) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003112 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE_KHR>(*pAccelerationStructure, pCreateInfo);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003113 as_state->allocator = pAllocator;
sourav parmarcd5fb182020-07-17 12:58:44 -07003114 accelerationStructureMap_khr[*pAccelerationStructure] = std::move(as_state);
Jeff Bolz95176d02020-04-01 00:36:16 -05003115}
3116
sourav parmarcd5fb182020-07-17 12:58:44 -07003117void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresKHR(
3118 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3119 const VkAccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos) {
3120 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3121 if (cb_state == nullptr) {
3122 return;
3123 }
3124 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003125 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003126 if (dst_as_state != nullptr) {
3127 dst_as_state->built = true;
3128 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003129 if (!disabled[command_buffer_state]) {
3130 cb_state->AddChild(dst_as_state);
3131 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003132 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003133 if (!disabled[command_buffer_state]) {
3134 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3135 if (src_as_state != nullptr) {
3136 cb_state->AddChild(src_as_state);
3137 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003138 }
3139 }
3140 cb_state->hasBuildAccelerationStructureCmd = true;
3141}
3142
3143void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresIndirectKHR(
3144 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3145 const VkDeviceAddress *pIndirectDeviceAddresses, const uint32_t *pIndirectStrides,
3146 const uint32_t *const *ppMaxPrimitiveCounts) {
3147 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3148 if (cb_state == nullptr) {
3149 return;
3150 }
3151 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003152 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003153 if (dst_as_state != nullptr) {
3154 dst_as_state->built = true;
3155 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003156 if (!disabled[command_buffer_state]) {
3157 cb_state->AddChild(dst_as_state);
3158 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003159 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003160 if (!disabled[command_buffer_state]) {
3161 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3162 if (src_as_state != nullptr) {
3163 cb_state->AddChild(src_as_state);
3164 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003165 }
3166 }
3167 cb_state->hasBuildAccelerationStructureCmd = true;
3168}
locke-lunargd556cc32019-09-17 01:21:23 -06003169void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
Mike Schuchardt2df08912020-12-15 16:28:09 -08003170 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2 *pMemoryRequirements) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003171 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(pInfo->accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003172 if (as_state != nullptr) {
3173 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3174 as_state->memory_requirements = *pMemoryRequirements;
3175 as_state->memory_requirements_checked = true;
3176 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3177 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3178 as_state->build_scratch_memory_requirements_checked = true;
3179 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3180 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3181 as_state->update_scratch_memory_requirements_checked = true;
3182 }
3183 }
3184}
3185
sourav parmarcd5fb182020-07-17 12:58:44 -07003186void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3187 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06003188 if (VK_SUCCESS != result) return;
3189 for (uint32_t i = 0; i < bindInfoCount; i++) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003190 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003191
sourav parmarcd5fb182020-07-17 12:58:44 -07003192 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(info.accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003193 if (as_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06003194 // Track objects tied to memory
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003195 auto mem_state = GetDevMemShared(info.memory);
3196 if (mem_state) {
3197 as_state->SetMemBinding(mem_state, info.memoryOffset);
3198 }
locke-lunargd556cc32019-09-17 01:21:23 -06003199
3200 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003201 // XXX TODO: Query device address for KHR extension
sourav parmarcd5fb182020-07-17 12:58:44 -07003202 if (enabled[gpu_validation]) {
locke-lunargd556cc32019-09-17 01:21:23 -06003203 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3204 }
3205 }
3206 }
3207}
3208
3209void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3210 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3211 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3212 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3213 if (cb_state == nullptr) {
3214 return;
3215 }
3216
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003217 auto *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003218 if (dst_as_state != nullptr) {
3219 dst_as_state->built = true;
3220 dst_as_state->build_info.initialize(pInfo);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003221 if (!disabled[command_buffer_state]) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003222 cb_state->AddChild(dst_as_state);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003223 }
locke-lunargd556cc32019-09-17 01:21:23 -06003224 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003225 if (!disabled[command_buffer_state]) {
3226 auto *src_as_state = GetAccelerationStructureStateNV(src);
3227 if (src_as_state != nullptr) {
3228 cb_state->AddChild(src_as_state);
3229 }
locke-lunargd556cc32019-09-17 01:21:23 -06003230 }
3231 cb_state->hasBuildAccelerationStructureCmd = true;
3232}
3233
3234void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3235 VkAccelerationStructureNV dst,
3236 VkAccelerationStructureNV src,
3237 VkCopyAccelerationStructureModeNV mode) {
3238 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3239 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003240 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
3241 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003242 if (dst_as_state != nullptr && src_as_state != nullptr) {
3243 dst_as_state->built = true;
3244 dst_as_state->build_info = src_as_state->build_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003245 if (!disabled[command_buffer_state]) {
3246 cb_state->AddChild(dst_as_state);
3247 cb_state->AddChild(src_as_state);
3248 }
locke-lunargd556cc32019-09-17 01:21:23 -06003249 }
3250 }
3251}
3252
Jeff Bolz95176d02020-04-01 00:36:16 -05003253void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3254 VkAccelerationStructureKHR accelerationStructure,
3255 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003256 if (!accelerationStructure) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003257 auto *as_state = GetAccelerationStructureStateKHR(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003258 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003259 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003260 accelerationStructureMap_khr.erase(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003261 }
3262}
3263
Jeff Bolz95176d02020-04-01 00:36:16 -05003264void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3265 VkAccelerationStructureNV accelerationStructure,
3266 const VkAllocationCallbacks *pAllocator) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003267 if (!accelerationStructure) return;
3268 auto *as_state = GetAccelerationStructureStateNV(accelerationStructure);
3269 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003270 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003271 accelerationStructureMap.erase(accelerationStructure);
3272 }
Jeff Bolz95176d02020-04-01 00:36:16 -05003273}
3274
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003275void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3276 uint32_t viewportCount,
3277 const VkViewportWScalingNV *pViewportWScalings) {
3278 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3279 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003280 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003281}
3282
locke-lunargd556cc32019-09-17 01:21:23 -06003283void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3284 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3285 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003286 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003287}
3288
3289void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3290 uint16_t lineStipplePattern) {
3291 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3292 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003293 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003294}
3295
3296void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3297 float depthBiasClamp, float depthBiasSlopeFactor) {
3298 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3299 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003300 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003301}
3302
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003303void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3304 const VkRect2D *pScissors) {
3305 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003306 uint32_t bits = ((1u << scissorCount) - 1u) << firstScissor;
3307 cb_state->scissorMask |= bits;
3308 cb_state->trashedScissorMask &= ~bits;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003309 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003310 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003311}
3312
locke-lunargd556cc32019-09-17 01:21:23 -06003313void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3314 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3315 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003316 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003317}
3318
3319void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3320 float maxDepthBounds) {
3321 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3322 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003323 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003324}
3325
3326void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3327 uint32_t compareMask) {
3328 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3329 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003330 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003331}
3332
3333void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3334 uint32_t writeMask) {
3335 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3336 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003337 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003338}
3339
3340void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3341 uint32_t reference) {
3342 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3343 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003344 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003345}
3346
3347// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
3348// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
3349// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
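// Sets whose previously recorded compatibility id no longer matches the new layout are treated as "disturbed"
// and cleared here (including any bound push descriptor set), so later draw-time validation sees them as unbound.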
3350void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
3351 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
3352 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
3353 cvdescriptorset::DescriptorSet *push_descriptor_set,
3354 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
3355 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
3356 // Defensive
3357 assert(pipeline_layout);
3358 if (!pipeline_layout) return;
3359
3360 uint32_t required_size = first_set + set_count;
3361 const uint32_t last_binding_index = required_size - 1;
3362 assert(last_binding_index < pipeline_layout->compat_for_set.size());
3363
3364 // Some useful shorthand
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003365 const auto lv_bind_point = ConvertToLvlBindPoint(pipeline_bind_point);
3366 auto &last_bound = cb_state->lastBound[lv_bind_point];
locke-lunargd556cc32019-09-17 01:21:23 -06003367 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
3368 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
3369
3370 // We need this three times in this function, but nowhere else
3371 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
3372 if (ds && ds->IsPushDescriptor()) {
3373 assert(ds == last_bound.push_descriptor_set.get());
3374 last_bound.push_descriptor_set = nullptr;
3375 return true;
3376 }
3377 return false;
3378 };
3379
3380 // Clean up the "disturbed" before and after the range to be set
3381 if (required_size < current_size) {
3382 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
3383 // We're disturbing sets past the last one; we'll shrink below, but first check for and clean up the push_descriptor
3384 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
3385 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
3386 }
3387 } else {
3388 // We're not disturbing past last, so leave the upper binding data alone.
3389 required_size = current_size;
3390 }
3391 }
3392
3393 // We resize if we need more set entries or if those past "last" are disturbed
3394 if (required_size != current_size) {
3395 last_bound.per_set.resize(required_size);
3396 }
3397
3398 // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
3399 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
3400 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
3401 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3402 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
3403 last_bound.per_set[set_idx].dynamicOffsets.clear();
3404 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
3405 }
3406 }
3407
3408 // Now update the bound sets with the input sets
3409 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
3410 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
3411 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
3412 cvdescriptorset::DescriptorSet *descriptor_set =
3413 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
3414
3415 // Record binding (or push)
3416 if (descriptor_set != last_bound.push_descriptor_set.get()) {
3417 // Only clean up the push descriptors if they aren't the currently used set.
3418 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3419 }
3420 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
3421 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
3422
3423 if (descriptor_set) {
3424 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
3425 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
3426 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
3427 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
3428 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
3429 input_dynamic_offsets = end_offset;
3430 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
3431 } else {
3432 last_bound.per_set[set_idx].dynamicOffsets.clear();
3433 }
3434 if (!descriptor_set->IsPushDescriptor()) {
3435 // Can't cache validation of push_descriptors
3436 cb_state->validated_descriptor_sets.insert(descriptor_set);
3437 }
3438 }
3439 }
3440}
3441
3442// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3443void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3444 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3445 uint32_t firstSet, uint32_t setCount,
3446 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3447 const uint32_t *pDynamicOffsets) {
3448 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3449 auto pipeline_layout = GetPipelineLayout(layout);
3450
3451 // Resize binding arrays
3452 uint32_t last_set_index = firstSet + setCount - 1;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003453 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3454 if (last_set_index >= cb_state->lastBound[lv_bind_point].per_set.size()) {
3455 cb_state->lastBound[lv_bind_point].per_set.resize(last_set_index + 1);
locke-lunargd556cc32019-09-17 01:21:23 -06003456 }
3457
3458 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3459 dynamicOffsetCount, pDynamicOffsets);
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003460 cb_state->lastBound[lv_bind_point].pipeline_layout = layout;
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06003461 cb_state->lastBound[lv_bind_point].UpdateSamplerDescriptorsUsedByImage();
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003462}
3463
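// Push descriptors have no application-allocated VkDescriptorSet, so the tracker owns a synthetic DescriptorSet
// in last_bound.push_descriptor_set; it is recreated whenever the target set index is no longer compatible with
// the given layout, bound like a regular set, and then updated with the pushed writes.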
locke-lunargd556cc32019-09-17 01:21:23 -06003464void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
3465 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
3466 const VkWriteDescriptorSet *pDescriptorWrites) {
3467 const auto &pipeline_layout = GetPipelineLayout(layout);
3468 // Short circuit invalid updates
3469 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003470 !pipeline_layout->set_layouts[set]->IsPushDescriptor()) {
locke-lunargd556cc32019-09-17 01:21:23 -06003471 return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003472 }
locke-lunargd556cc32019-09-17 01:21:23 -06003473
3474 // We need a descriptor set to update the bindings with, compatible with the passed layout
Jeremy Gebben50fb1832021-03-19 09:10:13 -06003475 const auto& dsl = pipeline_layout->set_layouts[set];
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003476 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3477 auto &last_bound = cb_state->lastBound[lv_bind_point];
locke-lunargd556cc32019-09-17 01:21:23 -06003478 auto &push_descriptor_set = last_bound.push_descriptor_set;
3479 // If we are disturbing the current push_descriptor_set, clear it
3480 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003481 last_bound.UnbindAndResetPushDescriptorSet(cb_state, new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
locke-lunargd556cc32019-09-17 01:21:23 -06003482 }
3483
3484 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
3485 nullptr);
3486 last_bound.pipeline_layout = layout;
3487
3488 // Now that we have either the new or extant push_descriptor set ... do the write updates against it
Jeff Bolz41a1ced2019-10-11 11:40:49 -05003489 push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
locke-lunargd556cc32019-09-17 01:21:23 -06003490}
3491
3492void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3493 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3494 uint32_t set, uint32_t descriptorWriteCount,
3495 const VkWriteDescriptorSet *pDescriptorWrites) {
3496 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3497 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3498}
3499
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003500void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3501 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3502 const void *pValues) {
3503 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3504 if (cb_state != nullptr) {
3505 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3506
3507 auto &push_constant_data = cb_state->push_constant_data;
3508 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3509 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06003510 cb_state->push_constant_pipeline_layout_set = layout;
3511
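// Walk each stage bit in stageFlags and mark the bytes in [offset, offset + size) as updated for that stage
// in the per-stage push constant tracking map.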
3512 auto flags = stageFlags;
3513 uint32_t bit_shift = 0;
3514 while (flags) {
3515 if (flags & 1) {
3516 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
3517 const auto it = cb_state->push_constant_data_update.find(flag);
3518
3519 if (it != cb_state->push_constant_data_update.end()) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06003520 std::memset(it->second.data() + offset, PC_Byte_Updated, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06003521 }
3522 }
3523 flags = flags >> 1;
3524 ++bit_shift;
3525 }
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003526 }
3527}
3528
locke-lunargd556cc32019-09-17 01:21:23 -06003529void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3530 VkIndexType indexType) {
locke-lunargd556cc32019-09-17 01:21:23 -06003531 auto cb_state = GetCBState(commandBuffer);
3532
3533 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
Piers Daniell39842ee2020-07-10 16:42:33 -06003534 cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
locke-lunarg1ae57d62020-11-18 10:49:19 -07003535 cb_state->index_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(buffer);
3536 cb_state->index_buffer_binding.size = cb_state->index_buffer_binding.buffer_state->createInfo.size;
locke-lunargd556cc32019-09-17 01:21:23 -06003537 cb_state->index_buffer_binding.offset = offset;
3538 cb_state->index_buffer_binding.index_type = indexType;
3539 // Add a binding for this index buffer to this command buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003540 if (!disabled[command_buffer_state]) {
3541 cb_state->AddChild(cb_state->index_buffer_binding.buffer_state.get());
3542 }
locke-lunargd556cc32019-09-17 01:21:23 -06003543}
3544
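// Track vertex buffer bindings. vkCmdBindVertexBuffers carries no size or stride information, so size is
// recorded as VK_WHOLE_SIZE and stride as 0 (vkCmdBindVertexBuffers2EXT provides explicit sizes and strides).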
3545void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3546 uint32_t bindingCount, const VkBuffer *pBuffers,
3547 const VkDeviceSize *pOffsets) {
3548 auto cb_state = GetCBState(commandBuffer);
3549
3550 uint32_t end = firstBinding + bindingCount;
3551 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3552 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3553 }
3554
3555 for (uint32_t i = 0; i < bindingCount; ++i) {
3556 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07003557 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003558 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06003559 vertex_buffer_binding.size = VK_WHOLE_SIZE;
3560 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06003561 // Add a binding for this vertex buffer to this command buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003562 if (pBuffers[i] && !disabled[command_buffer_state]) {
3563 cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
Jeff Bolz165818a2020-05-08 11:19:03 -05003564 }
locke-lunargd556cc32019-09-17 01:21:23 -06003565 }
3566}
3567
3568void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3569 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003570 if (disabled[command_buffer_state]) return;
3571
locke-lunargd556cc32019-09-17 01:21:23 -06003572 auto cb_state = GetCBState(commandBuffer);
3573 auto dst_buffer_state = GetBufferState(dstBuffer);
3574
3575 // Update bindings between buffer and cmd buffer
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003576 if (cb_state && dst_buffer_state) {
3577 cb_state->AddChild(dst_buffer_state);
3578 }
locke-lunargd556cc32019-09-17 01:21:23 -06003579}
3580
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06003581static bool SetEventStageMask(VkEvent event, VkPipelineStageFlags2KHR stageMask,
Jeff Bolz310775c2019-10-09 00:46:33 -05003582 EventToStageMap *localEventToStageMap) {
3583 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06003584 return false;
3585}
3586
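// Shared recording for vkCmdSetEvent/vkCmdSetEvent2KHR: ties the event to the command buffer and queues a
// deferred eventUpdates callback that records the source stage mask when the command buffer's state is later
// replayed at submit time.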
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003587void ValidationStateTracker::RecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06003588 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003589 if (!disabled[command_buffer_state]) {
3590 auto event_state = GetEventState(event);
3591 if (event_state) {
3592 cb_state->AddChild(event_state);
3593 }
locke-lunargd556cc32019-09-17 01:21:23 -06003594 }
3595 cb_state->events.push_back(event);
3596 if (!cb_state->waitedEvents.count(event)) {
3597 cb_state->writeEventsBeforeWait.push_back(event);
3598 }
Jeff Bolz310775c2019-10-09 00:46:33 -05003599 cb_state->eventUpdates.emplace_back(
3600 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
3601 return SetEventStageMask(event, stageMask, localEventToStageMap);
3602 });
locke-lunargd556cc32019-09-17 01:21:23 -06003603}
3604
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003605void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3606 VkPipelineStageFlags stageMask) {
3607 RecordCmdSetEvent(commandBuffer, event, stageMask);
3608}
3609
3610void ValidationStateTracker::PreCallRecordCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
3611 const VkDependencyInfoKHR *pDependencyInfo) {
3612 auto stage_masks = sync_utils::GetGlobalStageMasks(*pDependencyInfo);
3613
3614 RecordCmdSetEvent(commandBuffer, event, stage_masks.src);
Jeremy Gebben79649152021-06-22 14:46:24 -06003615
3616 RecordBarriers(commandBuffer, pDependencyInfo);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003617}
3618
3619void ValidationStateTracker::RecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3620 VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06003621 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003622 if (!disabled[command_buffer_state]) {
3623 auto event_state = GetEventState(event);
3624 if (event_state) {
3625 cb_state->AddChild(event_state);
3626 }
locke-lunargd556cc32019-09-17 01:21:23 -06003627 }
3628 cb_state->events.push_back(event);
3629 if (!cb_state->waitedEvents.count(event)) {
3630 cb_state->writeEventsBeforeWait.push_back(event);
3631 }
3632
3633 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003634 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003635 return SetEventStageMask(event, VkPipelineStageFlags2KHR(0), localEventToStageMap);
Jeff Bolz310775c2019-10-09 00:46:33 -05003636 });
locke-lunargd556cc32019-09-17 01:21:23 -06003637}
3638
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003639void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3640 VkPipelineStageFlags stageMask) {
3641 RecordCmdResetEvent(commandBuffer, event, stageMask);
3642}
3643
3644void ValidationStateTracker::PreCallRecordCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
3645 VkPipelineStageFlags2KHR stageMask) {
3646 RecordCmdResetEvent(commandBuffer, event, stageMask);
3647}
3648
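// Common helper for vkCmdWaitEvents/vkCmdWaitEvents2KHR: besides parenting each event to the command
// buffer, the events are added to waitedEvents so that a subsequent vkCmdSetEvent/vkCmdResetEvent
// recorded in this command buffer is no longer counted in writeEventsBeforeWait.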
3649void ValidationStateTracker::RecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents) {
locke-lunargd556cc32019-09-17 01:21:23 -06003650 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3651 for (uint32_t i = 0; i < eventCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003652 if (!disabled[command_buffer_state]) {
3653 auto event_state = GetEventState(pEvents[i]);
3654 if (event_state) {
3655 cb_state->AddChild(event_state);
3656 }
locke-lunargd556cc32019-09-17 01:21:23 -06003657 }
3658 cb_state->waitedEvents.insert(pEvents[i]);
3659 cb_state->events.push_back(pEvents[i]);
3660 }
3661}
3662
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003663void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
3664 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
3665 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3666 uint32_t bufferMemoryBarrierCount,
3667 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3668 uint32_t imageMemoryBarrierCount,
3669 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3670 RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
Jeremy Gebben79649152021-06-22 14:46:24 -06003671 RecordBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers,
3672 imageMemoryBarrierCount, pImageMemoryBarriers);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003673}
3674
3675void ValidationStateTracker::PreCallRecordCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount,
3676 const VkEvent *pEvents, const VkDependencyInfoKHR *pDependencyInfos) {
3677 RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
Jeremy Gebben79649152021-06-22 14:46:24 -06003678 for (uint32_t i = 0; i < eventCount; i++) {
3679 RecordBarriers(commandBuffer, &pDependencyInfos[i]);
3680 }
3681}
3682
3683void ValidationStateTracker::PostCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
3684 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
3685 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3686 uint32_t bufferMemoryBarrierCount,
3687 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3688 uint32_t imageMemoryBarrierCount,
3689 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3690 RecordBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers,
3691 imageMemoryBarrierCount, pImageMemoryBarriers);
3692}
3693
3694void ValidationStateTracker::PreCallRecordCmdPipelineBarrier2KHR(VkCommandBuffer commandBuffer,
3695 const VkDependencyInfoKHR *pDependencyInfo) {
3696 RecordBarriers(commandBuffer, pDependencyInfo);
3697}
3698
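// For the state tracker, barriers only matter insofar as they reference buffers and images: the two
// RecordBarriers overloads below simply parent those resources to the command buffer so their
// lifetimes are tracked alongside it.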
3699void ValidationStateTracker::RecordBarriers(VkCommandBuffer commandBuffer, uint32_t memoryBarrierCount,
3700 const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
3701 const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
3702 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3703 if (disabled[command_buffer_state]) return;
3704
3705 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3706 for (uint32_t i = 0; i < bufferMemoryBarrierCount; i++) {
3707 auto buffer_state = GetBufferState(pBufferMemoryBarriers[i].buffer);
3708 if (buffer_state) {
3709 cb_state->AddChild(buffer_state);
3710 }
3711 }
3712 for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
3713 auto image_state = GetImageState(pImageMemoryBarriers[i].image);
3714 if (image_state) {
3715 cb_state->AddChild(image_state);
3716 }
3717 }
3718}
3719
3720void ValidationStateTracker::RecordBarriers(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR *pDependencyInfo) {
3721 if (disabled[command_buffer_state]) return;
3722
3723 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3724 for (uint32_t i = 0; i < pDependencyInfo->bufferMemoryBarrierCount; i++) {
3725 auto buffer_state = GetBufferState(pDependencyInfo->pBufferMemoryBarriers[i].buffer);
3726 if (buffer_state) {
3727 cb_state->AddChild(buffer_state);
3728 }
3729 }
3730 for (uint32_t i = 0; i < pDependencyInfo->imageMemoryBarrierCount; i++) {
3731 auto image_state = GetImageState(pDependencyInfo->pImageMemoryBarriers[i].image);
3732 if (image_state) {
3733 cb_state->AddChild(image_state);
3734 }
3735 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003736}
3737
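// Query state, like event state, is resolved per submission: these helpers write into a local
// QueryMap supplied to the deferred queryUpdates lambdas rather than into global state, and their
// bool return value feeds the lambdas' "skip" result (always false in the state tracker).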
Jeff Bolz310775c2019-10-09 00:46:33 -05003738bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
3739 (*localQueryToStateMap)[object] = value;
3740 return false;
3741}
3742
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003743bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
3744 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05003745 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003746 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05003747 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06003748 }
3749 return false;
3750}
3751
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003752QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
3753 uint32_t perfPass) const {
3754 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06003755
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003756 auto iter = localQueryToStateMap->find(query);
3757 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05003758
Jeff Bolz310775c2019-10-09 00:46:33 -05003759 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06003760}
3761
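// Note: beginning a query marks it active/started immediately, but the QUERYSTATE_RUNNING transition
// is routed through cb_state->queryUpdates so it can be applied for the appropriate performance
// query pass (perfQueryPass) when the command buffer is submitted.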
3762void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003763 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003764 cb_state->activeQueries.insert(query_obj);
3765 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003766 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3767 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3768 QueryMap *localQueryToStateMap) {
3769 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
3770 return false;
3771 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003772 if (!disabled[command_buffer_state]) {
3773 auto pool_state = GetQueryPoolState(query_obj.pool);
3774 cb_state->AddChild(pool_state);
3775 }
locke-lunargd556cc32019-09-17 01:21:23 -06003776}
3777
3778void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3779 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003780 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003781 QueryObject query = {queryPool, slot};
3782 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3783 RecordCmdBeginQuery(cb_state, query);
3784}
3785
3786void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003787 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003788 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003789 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3790 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3791 QueryMap *localQueryToStateMap) {
3792 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3793 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003794 if (!disabled[command_buffer_state]) {
3795 auto pool_state = GetQueryPoolState(query_obj.pool);
3796 cb_state->AddChild(pool_state);
3797 }
locke-lunargd556cc32019-09-17 01:21:23 -06003798}
3799
3800void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003801 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003802 QueryObject query_obj = {queryPool, slot};
3803 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3804 RecordCmdEndQuery(cb_state, query_obj);
3805}
3806
3807void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3808 uint32_t firstQuery, uint32_t queryCount) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003809 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003810 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3811
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02003812 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
3813 QueryObject query = {queryPool, slot};
3814 cb_state->resetQueries.insert(query);
3815 }
3816
Jeff Bolz310775c2019-10-09 00:46:33 -05003817 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003818 bool do_validate, VkQueryPool &firstPerfQueryPool,
3819 uint32_t perfQueryPass,
3820 QueryMap *localQueryToStateMap) {
3821 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06003822 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003823 if (!disabled[command_buffer_state]) {
3824 auto pool_state = GetQueryPoolState(queryPool);
3825 cb_state->AddChild(pool_state);
3826 }
locke-lunargd556cc32019-09-17 01:21:23 -06003827}
3828
3829void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3830 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3831 VkDeviceSize dstOffset, VkDeviceSize stride,
3832 VkQueryResultFlags flags) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003833 if (disabled[query_validation] || disabled[command_buffer_state]) return;
3834
locke-lunargd556cc32019-09-17 01:21:23 -06003835 auto cb_state = GetCBState(commandBuffer);
3836 auto dst_buff_state = GetBufferState(dstBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003837 cb_state->AddChild(dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003838 auto pool_state = GetQueryPoolState(queryPool);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003839 cb_state->AddChild(pool_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003840}
3841
3842void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3843 VkQueryPool queryPool, uint32_t slot) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003844 PostCallRecordCmdWriteTimestamp2KHR(commandBuffer, pipelineStage, queryPool, slot);
3845}
3846
3847void ValidationStateTracker::PostCallRecordCmdWriteTimestamp2KHR(VkCommandBuffer commandBuffer,
3848 VkPipelineStageFlags2KHR pipelineStage, VkQueryPool queryPool,
3849 uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003850 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003851 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003852 if (!disabled[command_buffer_state]) {
3853 auto pool_state = GetQueryPoolState(queryPool);
3854 cb_state->AddChild(pool_state);
3855 }
locke-lunargd556cc32019-09-17 01:21:23 -06003856 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003857 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
3858 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3859 QueryMap *localQueryToStateMap) {
3860 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3861 });
locke-lunargd556cc32019-09-17 01:21:23 -06003862}
3863
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003864void ValidationStateTracker::PostCallRecordCmdWriteAccelerationStructuresPropertiesKHR(
3865 VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR *pAccelerationStructures,
3866 VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {
3867 if (disabled[query_validation]) return;
3868 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003869 if (!disabled[command_buffer_state]) {
3870 auto pool_state = GetQueryPoolState(queryPool);
3871 cb_state->AddChild(pool_state);
3872 }
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003873 cb_state->queryUpdates.emplace_back(
3874 [queryPool, firstQuery, accelerationStructureCount](const ValidationStateTracker *device_data, bool do_validate,
3875 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3876 QueryMap *localQueryToStateMap) {
3877 return SetQueryStateMulti(queryPool, firstQuery, accelerationStructureCount, perfQueryPass, QUERYSTATE_ENDED,
3878 localQueryToStateMap);
3879 });
3880}
3881
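// Note: unless the framebuffer is imageless, shared_ptr references to every attachment's
// IMAGE_VIEW_STATE are captured here so that the views and the render pass remain alive for the
// lifetime of the FRAMEBUFFER_STATE that uses them.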
locke-lunargd556cc32019-09-17 01:21:23 -06003882void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3883 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3884 VkResult result) {
3885 if (VK_SUCCESS != result) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003886
Jeremy Gebben88f58142021-06-01 10:07:52 -06003887 std::vector<std::shared_ptr<IMAGE_VIEW_STATE>> views;
Mike Schuchardt2df08912020-12-15 16:28:09 -08003888 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) == 0) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003889 views.resize(pCreateInfo->attachmentCount);
locke-lunarg1ae57d62020-11-18 10:49:19 -07003890
locke-lunargd556cc32019-09-17 01:21:23 -06003891 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003892 views[i] = GetShared<IMAGE_VIEW_STATE>(pCreateInfo->pAttachments[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003893 }
3894 }
Jeremy Gebben88f58142021-06-01 10:07:52 -06003895
3896 frameBufferMap[*pFramebuffer] = std::make_shared<FRAMEBUFFER_STATE>(
3897 *pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass), std::move(views));
locke-lunargd556cc32019-09-17 01:21:23 -06003898}
3899
locke-lunargd556cc32019-09-17 01:21:23 -06003900void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
3901 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3902 VkResult result) {
3903 if (VK_SUCCESS != result) return;
Jeremy Gebben88f58142021-06-01 10:07:52 -06003904 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003905}
3906
Mike Schuchardt2df08912020-12-15 16:28:09 -08003907void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07003908 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3909 VkResult result) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003910 if (VK_SUCCESS != result) return;
3911
3912 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
Tony-LunarG977448c2019-12-02 14:52:02 -07003913}
3914
Mike Schuchardt2df08912020-12-15 16:28:09 -08003915void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07003916 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3917 VkResult result) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003918 if (VK_SUCCESS != result) return;
3919
3920 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
Tony-LunarG977448c2019-12-02 14:52:02 -07003921}
3922
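// Common helper for the vkCmdBeginRenderPass/2/2KHR entry points: caches shared_ptr references to
// the active render pass and framebuffer, resolves the device-group render pass mask, and, when a
// framebuffer is present, rebuilds the per-subpass attachment tables consumed by later validation.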
locke-lunargd556cc32019-09-17 01:21:23 -06003923void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
3924 const VkRenderPassBeginInfo *pRenderPassBegin,
3925 const VkSubpassContents contents) {
3926 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunargaecf2152020-05-12 17:15:41 -06003927 auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
3928 auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06003929
3930 if (render_pass_state) {
locke-lunargaecf2152020-05-12 17:15:41 -06003931 cb_state->activeFramebuffer = framebuffer;
locke-lunargd556cc32019-09-17 01:21:23 -06003932 cb_state->activeRenderPass = render_pass_state;
Tony-LunarG61e7c0c2020-03-03 16:09:11 -07003933 cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
locke-lunargd556cc32019-09-17 01:21:23 -06003934 cb_state->activeSubpass = 0;
3935 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07003936
locke-lunargd556cc32019-09-17 01:21:23 -06003937 // Connect this RP to cmdBuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003938 if (!disabled[command_buffer_state]) {
3939 cb_state->AddChild(render_pass_state.get());
3940 }
locke-lunargd556cc32019-09-17 01:21:23 -06003941
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07003942 auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06003943 if (chained_device_group_struct) {
3944 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
3945 } else {
3946 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
3947 }
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003948
locke-lunargfc78e932020-11-19 17:06:24 -07003949 cb_state->active_subpasses = nullptr;
3950 cb_state->active_attachments = nullptr;
3951
3952 if (framebuffer) {
3953 cb_state->framebuffers.insert(framebuffer);
3954
3955 // Set cb_state->active_subpasses
3956 cb_state->active_subpasses =
3957 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
3958 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
3959 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
3960
3961 // Set cb_state->active_attachments & cb_state->attachments_view_states
3962 cb_state->active_attachments =
3963 std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(framebuffer->createInfo.attachmentCount);
3964 UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, pRenderPassBegin);
3965
3966 // Connect this framebuffer and its children to this cmdBuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003967 cb_state->AddChild(framebuffer.get());
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003968 }
locke-lunargd556cc32019-09-17 01:21:23 -06003969 }
3970}
3971
3972void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
3973 const VkRenderPassBeginInfo *pRenderPassBegin,
3974 VkSubpassContents contents) {
3975 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
3976}
3977
3978void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
3979 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003980 const VkSubpassBeginInfo *pSubpassBeginInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06003981 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
3982}
3983
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06003984void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
3985 uint32_t counterBufferCount,
3986 const VkBuffer *pCounterBuffers,
3987 const VkDeviceSize *pCounterBufferOffsets) {
3988 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3989
3990 cb_state->transform_feedback_active = true;
3991}
3992
3993void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
3994 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
3995 const VkDeviceSize *pCounterBufferOffsets) {
3996 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3997
3998 cb_state->transform_feedback_active = false;
3999}
4000
Tony-LunarG977448c2019-12-02 14:52:02 -07004001void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
4002 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004003 const VkSubpassBeginInfo *pSubpassBeginInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004004 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4005}
4006
locke-lunargd556cc32019-09-17 01:21:23 -06004007void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4008 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4009 cb_state->activeSubpass++;
4010 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07004011
4012 // Update cb_state->active_subpasses
4013 if (cb_state->activeRenderPass && cb_state->activeFramebuffer) {
4014 cb_state->active_subpasses = nullptr;
4015 cb_state->active_subpasses =
4016 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
4017
4018 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
4019 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
4020 }
locke-lunargd556cc32019-09-17 01:21:23 -06004021}
4022
4023void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4024 RecordCmdNextSubpass(commandBuffer, contents);
4025}
4026
4027void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004028 const VkSubpassBeginInfo *pSubpassBeginInfo,
4029 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06004030 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4031}
4032
Tony-LunarG977448c2019-12-02 14:52:02 -07004033void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004034 const VkSubpassBeginInfo *pSubpassBeginInfo,
4035 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004036 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4037}
4038
locke-lunargd556cc32019-09-17 01:21:23 -06004039void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
4040 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4041 cb_state->activeRenderPass = nullptr;
locke-lunargfc78e932020-11-19 17:06:24 -07004042 cb_state->active_attachments = nullptr;
4043 cb_state->active_subpasses = nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004044 cb_state->activeSubpass = 0;
4045 cb_state->activeFramebuffer = VK_NULL_HANDLE;
4046}
4047
4048void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
4049 RecordCmdEndRenderPassState(commandBuffer);
4050}
4051
4052void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004053 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06004054 RecordCmdEndRenderPassState(commandBuffer);
4055}
4056
Tony-LunarG977448c2019-12-02 14:52:02 -07004057void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004058 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004059 RecordCmdEndRenderPassState(commandBuffer);
4060}
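// Executing secondary command buffers folds their recorded state into the primary: image layout
// maps, query updates, and queue-submit callbacks are propagated, and the primary's dynamic
// viewport/scissor state is treated as trashed afterwards.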
locke-lunargd556cc32019-09-17 01:21:23 -06004061void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
4062 const VkCommandBuffer *pCommandBuffers) {
4063 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4064
4065 CMD_BUFFER_STATE *sub_cb_state = NULL;
4066 for (uint32_t i = 0; i < commandBuffersCount; i++) {
4067 sub_cb_state = GetCBState(pCommandBuffers[i]);
4068 assert(sub_cb_state);
4069 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
4070 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
4071 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
4072 // from the validation step to the recording step
4073 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
4074 }
4075 }
4076
4077 // Propagate initial layout and current layout state to the primary cmd buffer
4078 // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
4079 // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
4080 // for those other classes.
4081 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
4082 const auto image = sub_layout_map_entry.first;
4083 const auto *image_state = GetImageState(image);
4084 if (!image_state) continue; // Can't set layouts of a dead image
4085
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06004086 auto *cb_subres_map = cb_state->GetImageSubresourceLayoutMap(*image_state);
John Zulauf17708d02021-02-22 11:20:58 -07004087 const auto *sub_cb_subres_map = &sub_layout_map_entry.second;
locke-lunargd556cc32019-09-17 01:21:23 -06004088 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
4089 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
4090 }
4091
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06004092 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer();
locke-lunargd556cc32019-09-17 01:21:23 -06004093 cb_state->linkedCommandBuffers.insert(sub_cb_state);
Jeremy Gebben5570abe2021-05-16 18:35:13 -06004094 cb_state->AddChild(sub_cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004095 for (auto &function : sub_cb_state->queryUpdates) {
4096 cb_state->queryUpdates.push_back(function);
4097 }
4098 for (auto &function : sub_cb_state->queue_submit_functions) {
4099 cb_state->queue_submit_functions.push_back(function);
4100 }
David Zhao Akeley44139b12021-04-26 16:16:13 -07004101
4102 // State is trashed after executing secondary command buffers.
4103 // Importantly, this function runs after CoreChecks::PreCallValidateCmdExecuteCommands.
4104 cb_state->trashedViewportMask = ~uint32_t(0);
4105 cb_state->trashedScissorMask = ~uint32_t(0);
4106 cb_state->trashedViewportCount = true;
4107 cb_state->trashedScissorCount = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004108 }
4109}
4110
4111void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
4112 VkFlags flags, void **ppData, VkResult result) {
4113 if (VK_SUCCESS != result) return;
4114 RecordMappedMemory(mem, offset, size, ppData);
4115}
4116
4117void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4118 auto mem_info = GetDevMemState(mem);
4119 if (mem_info) {
4120 mem_info->mapped_range = MemRange();
4121 mem_info->p_driver_data = nullptr;
4122 }
4123}
4124
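// Note: for swapchain-backed binds there is no VkDeviceMemory to track, so the image is given a
// "fake" address carved out of fake_memory and aliased against every other image bound to the same
// swapchain index; ordinary binds record the memory range instead and, for images created with
// VK_IMAGE_CREATE_ALIAS_BIT, alias against other images bound to the same memory object.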
4125void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
4126 IMAGE_STATE *image_state = GetImageState(bindInfo.image);
4127 if (image_state) {
locke-lunargae26eac2020-04-16 15:29:05 -06004128 // An Android special image cannot get VkSubresourceLayout until the image is bound to memory.
4129 // See: VUID-vkGetImageSubresourceLayout-image-01895
4130 image_state->fragment_encoder =
4131 std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07004132 const auto swapchain_info = LvlFindInChain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06004133 if (swapchain_info) {
John Zulauf29d00532021-03-04 13:28:54 -07004134 auto *swapchain = GetSwapchainState(swapchain_info->swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004135 if (swapchain) {
John Zulaufd13b38e2021-03-05 08:17:38 -07004136 SWAPCHAIN_IMAGE &swap_image = swapchain->images[swapchain_info->imageIndex];
John Zulauf29d00532021-03-04 13:28:54 -07004137 if (swap_image.bound_images.empty()) {
4138 // If this is the first "binding" of an image to this swapchain index, get a fake allocation
4139 image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
4140 } else {
4141 image_state->swapchain_fake_address = (*swap_image.bound_images.cbegin())->swapchain_fake_address;
4142 }
John Zulaufd13b38e2021-03-05 08:17:38 -07004143 swap_image.bound_images.emplace(image_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004144 image_state->bind_swapchain = swapchain_info->swapchain;
4145 image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
John Zulaufd13b38e2021-03-05 08:17:38 -07004146
John Zulauf29d00532021-03-04 13:28:54 -07004147 // All images bound to this swapchain and index are aliases
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06004148 for (auto *other_image : swap_image.bound_images) {
4149 image_state->AddAliasingImage(other_image);
4150 }
locke-lunargd556cc32019-09-17 01:21:23 -06004151 }
4152 } else {
4153 // Track bound memory range information
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004154 auto mem_info = GetDevMemShared(bindInfo.memory);
locke-lunargd556cc32019-09-17 01:21:23 -06004155 if (mem_info) {
John Zulaufd13b38e2021-03-05 08:17:38 -07004156 if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06004157 for (auto *base_node : mem_info->ObjectBindings()) {
4158 if (base_node->Handle().type == kVulkanObjectTypeImage) {
4159 auto other_image = static_cast<IMAGE_STATE *>(base_node);
4160 image_state->AddAliasingImage(other_image);
4161 }
4162 }
John Zulaufd13b38e2021-03-05 08:17:38 -07004163 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004164 // Track objects tied to memory
4165 image_state->SetMemBinding(mem_info, bindInfo.memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06004166 }
locke-lunargd556cc32019-09-17 01:21:23 -06004167 }
locke-lunargd556cc32019-09-17 01:21:23 -06004168 }
4169}
4170
4171void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
4172 VkDeviceSize memoryOffset, VkResult result) {
4173 if (VK_SUCCESS != result) return;
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004174 auto bind_info = LvlInitStruct<VkBindImageMemoryInfo>();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004175 bind_info.image = image;
4176 bind_info.memory = mem;
4177 bind_info.memoryOffset = memoryOffset;
4178 UpdateBindImageMemoryState(bind_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004179}
4180
4181void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004182 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004183 if (VK_SUCCESS != result) return;
4184 for (uint32_t i = 0; i < bindInfoCount; i++) {
4185 UpdateBindImageMemoryState(pBindInfos[i]);
4186 }
4187}
4188
4189void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004190 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004191 if (VK_SUCCESS != result) return;
4192 for (uint32_t i = 0; i < bindInfoCount; i++) {
4193 UpdateBindImageMemoryState(pBindInfos[i]);
4194 }
4195}
4196
4197void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
4198 auto event_state = GetEventState(event);
4199 if (event_state) {
4200 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
4201 }
locke-lunargd556cc32019-09-17 01:21:23 -06004202}
4203
4204void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
4205 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
4206 VkResult result) {
4207 if (VK_SUCCESS != result) return;
4208 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
4209 pImportSemaphoreFdInfo->flags);
4210}
4211
4212void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004213 VkExternalSemaphoreHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004214 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004215 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004216 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
4217 semaphore_state->scope = kSyncScopeExternalPermanent;
4218 }
4219}
4220
4221#ifdef VK_USE_PLATFORM_WIN32_KHR
4222void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
4223 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
4224 if (VK_SUCCESS != result) return;
4225 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
4226 pImportSemaphoreWin32HandleInfo->flags);
4227}
4228
4229void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
4230 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4231 HANDLE *pHandle, VkResult result) {
4232 if (VK_SUCCESS != result) return;
4233 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
4234}
4235
4236void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
4237 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
4238 if (VK_SUCCESS != result) return;
4239 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
4240 pImportFenceWin32HandleInfo->flags);
4241}
4242
4243void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
4244 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4245 HANDLE *pHandle, VkResult result) {
4246 if (VK_SUCCESS != result) return;
4247 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
4248}
4249#endif
4250
4251void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
4252 VkResult result) {
4253 if (VK_SUCCESS != result) return;
4254 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
4255}
4256
Mike Schuchardt2df08912020-12-15 16:28:09 -08004257void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type,
4258 VkFenceImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06004259 FENCE_STATE *fence_node = GetFenceState(fence);
4260 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004261 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_FENCE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06004262 fence_node->scope == kSyncScopeInternal) {
4263 fence_node->scope = kSyncScopeExternalTemporary;
4264 } else {
4265 fence_node->scope = kSyncScopeExternalPermanent;
4266 }
4267 }
4268}
4269
4270void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
4271 VkResult result) {
4272 if (VK_SUCCESS != result) return;
4273 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
4274}
4275
Mike Schuchardt2df08912020-12-15 16:28:09 -08004276void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004277 FENCE_STATE *fence_state = GetFenceState(fence);
4278 if (fence_state) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004279 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004280 // Export with reference transference becomes external
4281 fence_state->scope = kSyncScopeExternalPermanent;
4282 } else if (fence_state->scope == kSyncScopeInternal) {
4283 // Export with copy transference has a side effect of resetting the fence
4284 fence_state->state = FENCE_UNSIGNALED;
4285 }
4286 }
4287}
4288
4289void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
4290 VkResult result) {
4291 if (VK_SUCCESS != result) return;
4292 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
4293}
4294
4295void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
4296 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
4297 if (VK_SUCCESS != result) return;
John Zulaufd5115702021-01-18 12:34:33 -07004298 const auto event = *pEvent;
Jeremy Gebbencbf22862021-03-03 12:01:22 -07004299 eventMap.emplace(event, std::make_shared<EVENT_STATE>(event, pCreateInfo->flags));
locke-lunargd556cc32019-09-17 01:21:23 -06004300}
4301
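// Note: swapchains created with a SHARED_*_REFRESH present mode are flagged as shared_presentable
// so that their images can later be marked layout_locked once they have been presented (see
// PostCallRecordQueuePresentKHR below).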
4302void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
4303 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
4304 SWAPCHAIN_NODE *old_swapchain_state) {
4305 if (VK_SUCCESS == result) {
Nathaniel Cesario39152e62021-07-02 13:04:16 -06004306 auto swapchain_state = CreateSwapchainState(pCreateInfo, *pSwapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004307 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
4308 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
4309 swapchain_state->shared_presentable = true;
4310 }
4311 surface_state->swapchain = swapchain_state.get();
4312 swapchainMap[*pSwapchain] = std::move(swapchain_state);
4313 } else {
4314 surface_state->swapchain = nullptr;
4315 }
4316 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
4317 if (old_swapchain_state) {
4318 old_swapchain_state->retired = true;
4319 }
4320 return;
4321}
4322
4323void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
4324 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
4325 VkResult result) {
4326 auto surface_state = GetSurfaceState(pCreateInfo->surface);
4327 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
4328 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
4329}
4330
4331void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
4332 const VkAllocationCallbacks *pAllocator) {
4333 if (!swapchain) return;
4334 auto swapchain_data = GetSwapchainState(swapchain);
4335 if (swapchain_data) {
John Zulauffaa7a522021-03-05 12:22:45 -07004336 for (auto &swapchain_image : swapchain_data->images) {
4337 // TODO: missing validation that the bound images are empty (except for image_state above)
4338 // Clean up the aliases and the bound_images *before* erasing the image_state.
Jeremy Gebben5570abe2021-05-16 18:35:13 -06004339 RemoveAliasingImages(swapchain_image.bound_images);
John Zulauffaa7a522021-03-05 12:22:45 -07004340 swapchain_image.bound_images.clear();
4341
4342 if (swapchain_image.image_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004343 swapchain_image.image_state->Destroy();
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06004344 imageMap.erase(swapchain_image.image_state->image());
John Zulauffaa7a522021-03-05 12:22:45 -07004345 swapchain_image.image_state = nullptr;
John Zulauf2d60a452021-03-04 15:12:03 -07004346 }
locke-lunargd556cc32019-09-17 01:21:23 -06004347 }
4348
4349 auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
4350 if (surface_state) {
4351 if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
4352 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004353 swapchain_data->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06004354 swapchainMap.erase(swapchain);
4355 }
4356}
4357
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004358void ValidationStateTracker::PostCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
4359 const VkDisplayModeCreateInfoKHR *pCreateInfo,
4360 const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode,
4361 VkResult result) {
4362 if (VK_SUCCESS != result) return;
4363 if (!pMode) return;
Jeremy Gebben5573a8c2021-06-04 08:55:10 -06004364 display_mode_map[*pMode] = std::make_shared<DISPLAY_MODE_STATE>(*pMode, physicalDevice);
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004365}
4366
locke-lunargd556cc32019-09-17 01:21:23 -06004367void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
4368 // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
4369 for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004370 auto semaphore_state = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
4371 if (semaphore_state) {
4372 semaphore_state->signaler.first = VK_NULL_HANDLE;
4373 semaphore_state->signaled = false;
locke-lunargd556cc32019-09-17 01:21:23 -06004374 }
4375 }
4376
4377 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
4378 // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
4379 // confused itself just as much.
4380 auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
4381 if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
4382 // Mark the image as having been released to the WSI
4383 auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
4384 if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
John Zulauffaa7a522021-03-05 12:22:45 -07004385 IMAGE_STATE *image_state = swapchain_data->images[pPresentInfo->pImageIndices[i]].image_state;
locke-lunargd556cc32019-09-17 01:21:23 -06004386 if (image_state) {
4387 image_state->acquired = false;
Jeff Bolz46c0ea02019-10-09 13:06:29 -05004388 if (image_state->shared_presentable) {
4389 image_state->layout_locked = true;
4390 }
locke-lunargd556cc32019-09-17 01:21:23 -06004391 }
4392 }
4393 }
4394 // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
4395 // its semaphore waits) /never/ participate in any completion proof.
4396}
4397
4398void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
4399 const VkSwapchainCreateInfoKHR *pCreateInfos,
4400 const VkAllocationCallbacks *pAllocator,
4401 VkSwapchainKHR *pSwapchains, VkResult result) {
4402 if (pCreateInfos) {
4403 for (uint32_t i = 0; i < swapchainCount; i++) {
4404 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
4405 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
4406 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
4407 }
4408 }
4409}
4410
4411void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4412 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004413 auto fence_state = GetFenceState(fence);
4414 if (fence_state && fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004415 // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
4416 // import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004417 fence_state->state = FENCE_INFLIGHT;
4418 fence_state->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
locke-lunargd556cc32019-09-17 01:21:23 -06004419 }
4420
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004421 auto semaphore_state = GetSemaphoreState(semaphore);
4422 if (semaphore_state && semaphore_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004423 // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
4424 // temporary import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004425 semaphore_state->signaled = true;
4426 semaphore_state->signaler.first = VK_NULL_HANDLE;
locke-lunargd556cc32019-09-17 01:21:23 -06004427 }
4428
4429 // Mark the image as acquired.
4430 auto swapchain_data = GetSwapchainState(swapchain);
4431 if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
John Zulauffaa7a522021-03-05 12:22:45 -07004432 IMAGE_STATE *image_state = swapchain_data->images[*pImageIndex].image_state;
locke-lunargd556cc32019-09-17 01:21:23 -06004433 if (image_state) {
4434 image_state->acquired = true;
4435 image_state->shared_presentable = swapchain_data->shared_presentable;
4436 }
4437 }
4438}
4439
4440void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4441 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
4442 VkResult result) {
4443 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4444 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
4445}
4446
4447void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
4448 uint32_t *pImageIndex, VkResult result) {
4449 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4450 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
4451 pAcquireInfo->fence, pImageIndex);
4452}
4453
4454void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
4455 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
4456 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
4457 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
4458 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
4459 phys_device_state.phys_device = pPhysicalDevices[i];
4460 // Init actual features for each physical device
4461 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
4462 }
4463 }
4464}
4465
4466// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2/2KHR versions
4467static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004468 VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004469 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
4470
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004471 if (pQueueFamilyProperties) { // Save queue family properties
locke-lunargd556cc32019-09-17 01:21:23 -06004472 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
4473 for (uint32_t i = 0; i < count; ++i) {
4474 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
4475 }
4476 }
4477}
4478
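// The Vulkan 1.0 entry point wraps each returned VkQueueFamilyProperties in a VkQueueFamilyProperties2
// so that the common helper above can be shared with the GetPhysicalDeviceQueueFamilyProperties2/2KHR
// paths.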
4479void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
4480 uint32_t *pQueueFamilyPropertyCount,
4481 VkQueueFamilyProperties *pQueueFamilyProperties) {
4482 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4483 assert(physical_device_state);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004484 VkQueueFamilyProperties2 *pqfp = nullptr;
4485 std::vector<VkQueueFamilyProperties2> qfp;
locke-lunargd556cc32019-09-17 01:21:23 -06004486 qfp.resize(*pQueueFamilyPropertyCount);
4487 if (pQueueFamilyProperties) {
4488 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004489 qfp[i] = LvlInitStruct<VkQueueFamilyProperties2>();
locke-lunargd556cc32019-09-17 01:21:23 -06004490 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
4491 }
4492 pqfp = qfp.data();
4493 }
4494 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
4495}
4496
4497void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004498 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004499 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4500 assert(physical_device_state);
4501 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4502 pQueueFamilyProperties);
4503}
4504
4505void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004506 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004507 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4508 assert(physical_device_state);
4509 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4510 pQueueFamilyProperties);
4511}
4512void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
4513 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004514 if (!surface) return;
4515 auto surface_state = GetSurfaceState(surface);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004516 surface_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06004517 surface_map.erase(surface);
4518}
4519
4520void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004521 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06004522}
4523
4524void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
4525 const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
4526 const VkAllocationCallbacks *pAllocator,
4527 VkSurfaceKHR *pSurface, VkResult result) {
4528 if (VK_SUCCESS != result) return;
4529 RecordVulkanSurface(pSurface);
4530}
4531
4532#ifdef VK_USE_PLATFORM_ANDROID_KHR
4533void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
4534 const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
4535 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4536 VkResult result) {
4537 if (VK_SUCCESS != result) return;
4538 RecordVulkanSurface(pSurface);
4539}
4540#endif // VK_USE_PLATFORM_ANDROID_KHR
4541
4542#ifdef VK_USE_PLATFORM_IOS_MVK
4543void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
4544 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4545 VkResult result) {
4546 if (VK_SUCCESS != result) return;
4547 RecordVulkanSurface(pSurface);
4548}
4549#endif // VK_USE_PLATFORM_IOS_MVK
4550
4551#ifdef VK_USE_PLATFORM_MACOS_MVK
4552void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
4553 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
4554 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4555 VkResult result) {
4556 if (VK_SUCCESS != result) return;
4557 RecordVulkanSurface(pSurface);
4558}
4559#endif // VK_USE_PLATFORM_MACOS_MVK
4560
Jeremy Kniagerf33a67c2019-12-09 09:44:39 -07004561#ifdef VK_USE_PLATFORM_METAL_EXT
4562void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
4563 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
4564 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4565 VkResult result) {
4566 if (VK_SUCCESS != result) return;
4567 RecordVulkanSurface(pSurface);
4568}
4569#endif // VK_USE_PLATFORM_METAL_EXT
4570
locke-lunargd556cc32019-09-17 01:21:23 -06004571#ifdef VK_USE_PLATFORM_WAYLAND_KHR
4572void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
4573 const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
4574 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4575 VkResult result) {
4576 if (VK_SUCCESS != result) return;
4577 RecordVulkanSurface(pSurface);
4578}
4579#endif // VK_USE_PLATFORM_WAYLAND_KHR
4580
4581#ifdef VK_USE_PLATFORM_WIN32_KHR
4582void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
4583 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
4584 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4585 VkResult result) {
4586 if (VK_SUCCESS != result) return;
4587 RecordVulkanSurface(pSurface);
4588}
4589#endif // VK_USE_PLATFORM_WIN32_KHR
4590
4591#ifdef VK_USE_PLATFORM_XCB_KHR
4592void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
4593 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4594 VkResult result) {
4595 if (VK_SUCCESS != result) return;
4596 RecordVulkanSurface(pSurface);
4597}
4598#endif // VK_USE_PLATFORM_XCB_KHR
4599
4600#ifdef VK_USE_PLATFORM_XLIB_KHR
4601void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
4602 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4603 VkResult result) {
4604 if (VK_SUCCESS != result) return;
4605 RecordVulkanSurface(pSurface);
4606}
4607#endif // VK_USE_PLATFORM_XLIB_KHR
4608
Niklas Haas8b84af12020-04-19 22:20:11 +02004609void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
4610 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
4611 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4612 VkResult result) {
4613 if (VK_SUCCESS != result) return;
4614 RecordVulkanSurface(pSurface);
4615}
4616
Cort23cf2282019-09-20 18:58:18 +02004617void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004618 VkPhysicalDeviceFeatures *pFeatures) {
4619 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Yilong Li358152a2020-07-08 02:16:45 -07004620 // Reset the features2 safe struct before setting up the features field.
4621 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02004622 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02004623}
4624
4625void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004626 VkPhysicalDeviceFeatures2 *pFeatures) {
4627 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02004628 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02004629}
4630
4631void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004632 VkPhysicalDeviceFeatures2 *pFeatures) {
4633 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02004634 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02004635}
4636
locke-lunargd556cc32019-09-17 01:21:23 -06004637void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
4638 VkSurfaceKHR surface,
4639 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
4640 VkResult result) {
4641 if (VK_SUCCESS != result) return;
4642 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004643 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004644
4645 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4646 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004647}
4648
4649void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
4650 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4651 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
4652 if (VK_SUCCESS != result) return;
4653 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004654 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004655
4656 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4657 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004658}
4659
4660void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
4661 VkSurfaceKHR surface,
4662 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
4663 VkResult result) {
     if (VK_SUCCESS != result) return;
4664 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004665 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
4666 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
4667 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
4668 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
4669 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
4670 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
4671 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
4672 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
4673 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
4674 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004675
4676 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4677 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004678}
4679
4680void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
4681 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
4682 VkBool32 *pSupported, VkResult result) {
4683 if (VK_SUCCESS != result) return;
4684 auto surface_state = GetSurfaceState(surface);
4685 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
4686}
4687
4688void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
4689 VkSurfaceKHR surface,
4690 uint32_t *pPresentModeCount,
4691 VkPresentModeKHR *pPresentModes,
4692 VkResult result) {
4693 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4694
4695 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
4696 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004697
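    // Standard two-call enumeration: the first call may only return a count, a later call fills pPresentModes.
    // The cached vector is only ever grown so results recorded from an earlier, larger query are not discarded.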
4698 if (*pPresentModeCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004699 if (*pPresentModeCount > physical_device_state->present_modes.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004700 physical_device_state->present_modes.resize(*pPresentModeCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004701 }
locke-lunargd556cc32019-09-17 01:21:23 -06004702 }
4703 if (pPresentModes) {
locke-lunargd556cc32019-09-17 01:21:23 -06004704 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
4705 physical_device_state->present_modes[i] = pPresentModes[i];
4706 }
4707 }
4708}
4709
4710void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
4711 uint32_t *pSurfaceFormatCount,
4712 VkSurfaceFormatKHR *pSurfaceFormats,
4713 VkResult result) {
4714 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4715
4716 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004717
4718 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004719 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004720 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004721 }
locke-lunargd556cc32019-09-17 01:21:23 -06004722 }
4723 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004724 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
4725 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
4726 }
4727 }
4728}
4729
4730void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
4731 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4732 uint32_t *pSurfaceFormatCount,
4733 VkSurfaceFormat2KHR *pSurfaceFormats,
4734 VkResult result) {
4735 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4736
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004737 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004738 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004739 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
4740 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
4741 }
locke-lunargd556cc32019-09-17 01:21:23 -06004742 }
4743 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004744 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004745 physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
locke-lunargd556cc32019-09-17 01:21:23 -06004746 }
4747 }
4748}
4749
4750void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4751 const VkDebugUtilsLabelEXT *pLabelInfo) {
4752 BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4753}
4754
4755void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
4756 EndCmdDebugUtilsLabel(report_data, commandBuffer);
4757}
4758
4759void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4760 const VkDebugUtilsLabelEXT *pLabelInfo) {
4761 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4762
4763 // Squirrel away an easily accessible copy.
4764 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4765 cb_state->debug_label = LoggingLabel(pLabelInfo);
4766}
4767
4768void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004769 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004770 if (NULL != pPhysicalDeviceGroupProperties) {
4771 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
4772 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
4773 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
4774 auto &phys_device_state = physical_device_map[cur_phys_dev];
4775 phys_device_state.phys_device = cur_phys_dev;
4776 // Init actual features for each physical device
4777 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
4778 }
4779 }
4780 }
4781}
4782
4783void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004784 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004785 VkResult result) {
4786 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4787 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4788}
4789
4790void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004791 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004792 VkResult result) {
4793 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4794 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4795}
4796
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004797void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
4798 uint32_t queueFamilyIndex,
4799 uint32_t *pCounterCount,
4800 VkPerformanceCounterKHR *pCounters) {
4801 if (NULL == pCounters) return;
4802
4803 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4804 assert(physical_device_state);
4805
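    // Cache this queue family's performance counters on the physical device so performance query pool
    // creation and usage can later be validated against them.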
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004806 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queue_family_counters(new QUEUE_FAMILY_PERF_COUNTERS());
4807 queue_family_counters->counters.resize(*pCounterCount);
4808 for (uint32_t i = 0; i < *pCounterCount; i++) queue_family_counters->counters[i] = pCounters[i];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004809
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004810 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queue_family_counters);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004811}
4812
4813void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
4814 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
4815 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
4816 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4817 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
4818}
4819
4820void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
4821 VkResult result) {
4822 if (result == VK_SUCCESS) performance_lock_acquired = true;
4823}
4824
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004825void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
4826 performance_lock_acquired = false;
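    // Note the release on every live command buffer so submission of performance-query work recorded
    // while the lock was held can be flagged later.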
4827 for (auto &cmd_buffer : commandBufferMap) {
4828 cmd_buffer.second->performance_lock_released = true;
4829 }
4830}
4831
locke-lunargd556cc32019-09-17 01:21:23 -06004832void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004833 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004834 const VkAllocationCallbacks *pAllocator) {
4835 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004836 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4837 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004838 desc_template_map.erase(descriptorUpdateTemplate);
4839}
4840
4841void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004842 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004843 const VkAllocationCallbacks *pAllocator) {
4844 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004845 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4846 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004847 desc_template_map.erase(descriptorUpdateTemplate);
4848}
4849
Mike Schuchardt2df08912020-12-15 16:28:09 -08004850void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4851 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) {
locke-lunargd556cc32019-09-17 01:21:23 -06004852 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004853 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004854 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
4855}
4856
Mike Schuchardt2df08912020-12-15 16:28:09 -08004857void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device,
4858 const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4859 const VkAllocationCallbacks *pAllocator,
4860 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate,
4861 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004862 if (VK_SUCCESS != result) return;
4863 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4864}
4865
4866void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004867 VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
4868 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004869 if (VK_SUCCESS != result) return;
4870 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4871}
4872
4873void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004874 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004875 const void *pData) {
4876 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
4877 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
4878 assert(0);
4879 } else {
4880 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
4881 // TODO: Record template push descriptor updates
4882 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
4883 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
4884 }
4885 }
4886}
4887
4888void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
4889 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4890 const void *pData) {
4891 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
4892}
4893
4894void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004895 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004896 const void *pData) {
4897 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
4898}
4899
Mike Schuchardt2df08912020-12-15 16:28:09 -08004900void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
4901 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4902 VkPipelineLayout layout, uint32_t set,
4903 const void *pData) {
locke-lunargd556cc32019-09-17 01:21:23 -06004904 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4905
4906 const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4907 if (template_state) {
4908 auto layout_data = GetPipelineLayout(layout);
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06004909 auto dsl = layout_data ? layout_data->GetDsl(set) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004910 const auto &template_ci = template_state->create_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004911 if (dsl && !dsl->Destroyed()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004912 // Decode the template into a set of write updates
4913 cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
4914 dsl->GetDescriptorSetLayout());
4915 RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
4916 static_cast<uint32_t>(decoded_template.desc_writes.size()),
4917 decoded_template.desc_writes.data());
4918 }
4919 }
4920}
4921
4922void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
4923 uint32_t *pPropertyCount, void *pProperties) {
4924 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4925 if (*pPropertyCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06004926 physical_device_state->display_plane_property_count = *pPropertyCount;
4927 }
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004928 if (*pPropertyCount || pProperties) {
4929 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004930 }
4931}
4932
4933void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
4934 uint32_t *pPropertyCount,
4935 VkDisplayPlanePropertiesKHR *pProperties,
4936 VkResult result) {
4937 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4938 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
4939}
4940
4941void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
4942 uint32_t *pPropertyCount,
4943 VkDisplayPlaneProperties2KHR *pProperties,
4944 VkResult result) {
4945 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4946 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
4947}
4948
4949void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4950 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
4951 QueryObject query_obj = {queryPool, query, index};
4952 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4953 RecordCmdBeginQuery(cb_state, query_obj);
4954}
4955
4956void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4957 uint32_t query, uint32_t index) {
4958 QueryObject query_obj = {queryPool, query, index};
4959 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4960 RecordCmdEndQuery(cb_state, query_obj);
4961}
4962
4963void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
4964 VkSamplerYcbcrConversion ycbcr_conversion) {
Jeremy Gebbenf4fb2a02021-07-08 09:57:46 -06004965 VkFormatFeatureFlags format_features = 0;
4966
4967 if (create_info->format != VK_FORMAT_UNDEFINED) {
4968 format_features = GetPotentialFormatFeatures(create_info->format);
4969 } else if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
4970 // If format is VK_FORMAT_UNDEFINED, format_features will be set by external AHB features
4971 format_features = GetExternalFormatFeaturesANDROID(create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004972 }
Jeremy Gebbenf4fb2a02021-07-08 09:57:46 -06004973
4974 samplerYcbcrConversionMap[ycbcr_conversion] =
4975 std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>(ycbcr_conversion, create_info, format_features);
locke-lunargd556cc32019-09-17 01:21:23 -06004976}
4977
4978void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
4979 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
4980 const VkAllocationCallbacks *pAllocator,
4981 VkSamplerYcbcrConversion *pYcbcrConversion,
4982 VkResult result) {
4983 if (VK_SUCCESS != result) return;
4984 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
4985}
4986
4987void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
4988 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
4989 const VkAllocationCallbacks *pAllocator,
4990 VkSamplerYcbcrConversion *pYcbcrConversion,
4991 VkResult result) {
4992 if (VK_SUCCESS != result) return;
4993 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
4994}
4995
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004996void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004997 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004998 ycbcr_state->Destroy();
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004999 samplerYcbcrConversionMap.erase(ycbcr_conversion);
5000}
5001
locke-lunargd556cc32019-09-17 01:21:23 -06005002void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
5003 const VkAllocationCallbacks *pAllocator) {
5004 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005005 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005006}
5007
5008void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
5009 VkSamplerYcbcrConversion ycbcrConversion,
5010 const VkAllocationCallbacks *pAllocator) {
5011 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005012 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005013}
5014
Tony-LunarG977448c2019-12-02 14:52:02 -07005015void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5016 uint32_t queryCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005017 // Do nothing if the feature is not enabled.
Piers Daniell41b8c5d2020-01-10 15:42:00 -07005018 if (!enabled_features.core12.hostQueryReset) return;
locke-lunargd556cc32019-09-17 01:21:23 -06005019
5020 // Do nothing if the query pool has been destroyed.
5021 auto query_pool_state = GetQueryPoolState(queryPool);
5022 if (!query_pool_state) return;
5023
5024 // Reset the state of existing entries.
5025 QueryObject query_obj{queryPool, 0};
5026 const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
5027 for (uint32_t i = 0; i < max_query_count; ++i) {
5028 query_obj.query = firstQuery + i;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02005029 queryToStateMap[query_obj] = QUERYSTATE_RESET;
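        // Performance queries keep one state entry per counter pass, so reset every pass for this query as well.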
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005030 if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005031 for (uint32_t pass_index = 0; pass_index < query_pool_state->n_performance_passes; pass_index++) {
5032 query_obj.perf_pass = pass_index;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02005033 queryToStateMap[query_obj] = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005034 }
5035 }
locke-lunargd556cc32019-09-17 01:21:23 -06005036 }
5037}
5038
Tony-LunarG977448c2019-12-02 14:52:02 -07005039void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5040 uint32_t queryCount) {
5041 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
5042}
5043
5044void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5045 uint32_t queryCount) {
5046 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
5047}
5048
locke-lunargd556cc32019-09-17 01:21:23 -06005049void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
5050 const TEMPLATE_STATE *template_state, const void *pData) {
5051 // Translate the templated update into a normal update for validation...
5052 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
5053 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
5054 decoded_update.desc_writes.data(), 0, NULL);
5055}
5056
5057// Update the common AllocateDescriptorSetsData
5058void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005059 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005060 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005061 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005062 if (layout) {
5063 ds_data->layout_nodes[i] = layout;
5064 // Count total descriptors required per type
5065 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5066 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005067 uint32_t type_index = static_cast<uint32_t>(binding_layout->descriptorType);
5068 ds_data->required_descriptors_by_type[type_index] += binding_layout->descriptorCount;
locke-lunargd556cc32019-09-17 01:21:23 -06005069 }
5070 }
5071 // Any unknown layouts will be flagged as errors during the ValidateAllocateDescriptorSets() call
5072 }
5073}
5074
5075// Decrement allocated sets from the pool and insert new sets into set_map
5076void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
5077 const VkDescriptorSet *descriptor_sets,
5078 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
5079 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
5080 // Account for sets and individual descriptors allocated from pool
5081 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
5082 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
5083 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
5084 }
5085
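    // Variable descriptor counts (descriptor indexing) only apply when the pNext struct supplies one count per set being allocated.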
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07005086 const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(p_alloc_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06005087 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
5088
5089 // Create tracking object for each descriptor set; insert into global map and the pool's set.
5090 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
5091 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
5092
Jeff Bolz41a1ced2019-10-11 11:40:49 -05005093 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07005094 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06005095 pool_state->sets.insert(new_ds.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005096 setMap[descriptor_sets[i]] = std::move(new_ds);
5097 }
5098}
5099
5100// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
Jeremy Kniager05631e72020-06-08 14:21:35 -06005101void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
locke-lunarg540b2252020-08-03 13:23:36 -06005102 VkPipelineBindPoint bind_point, const char *function) {
5103 UpdateDrawState(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005104 cb_state->hasDispatchCmd = true;
5105}
5106
locke-lunargd556cc32019-09-17 01:21:23 -06005107// Generic function to handle state update for all CmdDraw* type functions
locke-lunarg540b2252020-08-03 13:23:36 -06005108void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
5109 const char *function) {
5110 UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005111 cb_state->hasDrawCmd = true;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005112
5113 // Update the consumed viewport/scissor count.
5114 uint32_t& used = cb_state->usedViewportScissorCount;
5115 used = std::max(used, cb_state->pipelineStaticViewportCount);
5116 used = std::max(used, cb_state->pipelineStaticScissorCount);
5117 cb_state->usedDynamicViewportCount |= !!(cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET); // !! silences MSVC warn
5118 cb_state->usedDynamicScissorCount |= !!(cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET);
locke-lunargd556cc32019-09-17 01:21:23 -06005119}
5120
5121void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5122 uint32_t firstVertex, uint32_t firstInstance) {
5123 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005124 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
locke-lunargd556cc32019-09-17 01:21:23 -06005125}
5126
Tony-LunarG745150c2021-07-02 15:07:31 -06005127void ValidationStateTracker::PostCallRecordCmdDrawMultiEXT(VkCommandBuffer commandBuffer, uint32_t drawCount,
5128 const VkMultiDrawInfoEXT *pVertexInfo, uint32_t instanceCount,
5129 uint32_t firstInstance, uint32_t stride) {
5130 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5131 UpdateStateCmdDrawType(cb_state, CMD_DRAWMULTIEXT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMultiEXT()");
5132}
5133
locke-lunargd556cc32019-09-17 01:21:23 -06005134void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5135 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5136 uint32_t firstInstance) {
5137 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005138 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
locke-lunargd556cc32019-09-17 01:21:23 -06005139}
5140
Tony-LunarG745150c2021-07-02 15:07:31 -06005141void ValidationStateTracker::PostCallRecordCmdDrawMultiIndexedEXT(VkCommandBuffer commandBuffer, uint32_t drawCount,
5142 const VkMultiDrawIndexedInfoEXT *pIndexInfo,
5143 uint32_t instanceCount, uint32_t firstInstance, uint32_t stride,
5144 const int32_t *pVertexOffset) {
5145 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5146 UpdateStateCmdDrawType(cb_state, CMD_DRAWMULTIINDEXEDEXT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMultiIndexedEXT()");
5147}
5148
locke-lunargd556cc32019-09-17 01:21:23 -06005149void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5150 uint32_t count, uint32_t stride) {
5151 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5152 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005153 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005154 if (!disabled[command_buffer_state]) {
5155 cb_state->AddChild(buffer_state);
5156 }
locke-lunargd556cc32019-09-17 01:21:23 -06005157}
5158
5159void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5160 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5161 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5162 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005163 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005164 if (!disabled[command_buffer_state]) {
5165 cb_state->AddChild(buffer_state);
5166 }
locke-lunargd556cc32019-09-17 01:21:23 -06005167}
5168
5169void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5170 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005171 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
locke-lunargd556cc32019-09-17 01:21:23 -06005172}
5173
5174void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5175 VkDeviceSize offset) {
5176 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005177 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005178 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005179 if (!disabled[command_buffer_state]) {
5180 cb_state->AddChild(buffer_state);
5181 }
locke-lunargd556cc32019-09-17 01:21:23 -06005182}
5183
Tony-LunarG977448c2019-12-02 14:52:02 -07005184void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5185 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
locke-lunarg540b2252020-08-03 13:23:36 -06005186 uint32_t stride, const char *function) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005187 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5188 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5189 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005190 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005191 if (!disabled[command_buffer_state]) {
5192 cb_state->AddChild(buffer_state);
5193 cb_state->AddChild(count_buffer_state);
5194 }
Tony-LunarG977448c2019-12-02 14:52:02 -07005195}
5196
locke-lunargd556cc32019-09-17 01:21:23 -06005197void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5198 VkDeviceSize offset, VkBuffer countBuffer,
5199 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5200 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005201 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5202 "vkCmdDrawIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005203}
5204
5205void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5206 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5207 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005208 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5209 "vkCmdDrawIndirectCount()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005210}
5211
5212void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5213 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
locke-lunarg540b2252020-08-03 13:23:36 -06005214 uint32_t maxDrawCount, uint32_t stride, const char *function) {
locke-lunargd556cc32019-09-17 01:21:23 -06005215 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5216 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5217 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005218 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005219 if (!disabled[command_buffer_state]) {
5220 cb_state->AddChild(buffer_state);
5221 cb_state->AddChild(count_buffer_state);
5222 }
locke-lunargd556cc32019-09-17 01:21:23 -06005223}
5224
5225void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5226 VkDeviceSize offset, VkBuffer countBuffer,
5227 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5228 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005229 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5230 "vkCmdDrawIndexedIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005231}
5232
5233void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
5234 VkDeviceSize offset, VkBuffer countBuffer,
5235 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5236 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005237 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5238 "vkCmdDrawIndexedIndirectCount()");
locke-lunargd556cc32019-09-17 01:21:23 -06005239}
5240
5241void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5242 uint32_t firstTask) {
5243 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005244 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005245}
5246
5247void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5248 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5249 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005250 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5251 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005252 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005253 if (!disabled[command_buffer_state] && buffer_state) {
5254 cb_state->AddChild(buffer_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005255 }
5256}
5257
5258void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5259 VkDeviceSize offset, VkBuffer countBuffer,
5260 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5261 uint32_t stride) {
5262 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5263 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5264 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005265 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5266 "vkCmdDrawMeshTasksIndirectCountNV()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005267 if (!disabled[command_buffer_state]) {
5268 if (buffer_state) {
5269 cb_state->AddChild(buffer_state);
5270 }
5271 if (count_buffer_state) {
5272 cb_state->AddChild(count_buffer_state);
5273 }
locke-lunargd556cc32019-09-17 01:21:23 -06005274 }
5275}
5276
5277void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
5278 const VkAllocationCallbacks *pAllocator,
5279 VkShaderModule *pShaderModule, VkResult result,
5280 void *csm_state_data) {
5281 if (VK_SUCCESS != result) return;
5282 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
5283
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06005284 spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
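    // Code that does not start with the SPIR-V magic number (e.g. GLSL accepted via VK_NV_glsl_shader) gets an empty placeholder state object.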
locke-lunargd556cc32019-09-17 01:21:23 -06005285 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005286 auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
5287 csm_state->unique_shader_id)
5288 : std::make_shared<SHADER_MODULE_STATE>();
sfricke-samsung962cad92021-04-13 00:46:29 -07005289 new_shader_module->SetPushConstantUsedInShader();
locke-lunargd556cc32019-09-17 01:21:23 -06005290 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
5291}
5292
5293void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06005294 PipelineStageState *stage_state) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005295 // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
locke-lunargde3f0fa2020-09-10 11:55:31 -06005296 stage_state->entry_point_name = pStage->pName;
5297 stage_state->shader_state = GetShared<SHADER_MODULE_STATE>(pStage->module);
5298 auto module = stage_state->shader_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06005299 if (!module->has_valid_spirv) return;
5300
5301 // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
sfricke-samsung962cad92021-04-13 00:46:29 -07005302 auto entrypoint = module->FindEntrypoint(pStage->pName, pStage->stage);
locke-lunargd556cc32019-09-17 01:21:23 -06005303 if (entrypoint == module->end()) return;
5304
locke-lunarg654e3692020-06-04 17:19:15 -06005305 stage_state->stage_flag = pStage->stage;
5306
locke-lunargd556cc32019-09-17 01:21:23 -06005307 // Mark accessible ids
sfricke-samsung962cad92021-04-13 00:46:29 -07005308 stage_state->accessible_ids = module->MarkAccessibleIds(entrypoint);
5309 module->ProcessExecutionModes(entrypoint, pipeline);
locke-lunargd556cc32019-09-17 01:21:23 -06005310
sfricke-samsung962cad92021-04-13 00:46:29 -07005311 stage_state->descriptor_uses = module->CollectInterfaceByDescriptorSlot(
5312 stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
locke-lunargd556cc32019-09-17 01:21:23 -06005313 // Capture descriptor uses for the pipeline
locke-lunarg36045992020-08-20 16:54:37 -06005314 for (const auto &use : stage_state->descriptor_uses) {
locke-lunargd556cc32019-09-17 01:21:23 -06005315 // While validating shaders capture which slots are used by the pipeline
John Zulauf649edd52019-10-02 14:39:41 -06005316 const uint32_t slot = use.first.first;
locke-lunarg351c9d82020-10-23 14:43:21 -06005317 pipeline->active_slots[slot][use.first.second].is_writable |= use.second.is_writable;
locke-lunarg36045992020-08-20 16:54:37 -06005318 auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
sfricke-samsung962cad92021-04-13 00:46:29 -07005319 reqs = descriptor_req(reqs | module->DescriptorTypeToReqs(use.second.type_id));
locke-lunarg25b6c352020-08-06 17:44:18 -06005320 if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
locke-lunarg12d20992020-09-21 12:46:49 -06005321 if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
locke-lunargae2a43c2020-09-22 17:21:57 -06005322 if (use.second.is_sampler_bias_offset) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET);
locke-lunarg12d20992020-09-21 12:46:49 -06005323
John Zulauf649edd52019-10-02 14:39:41 -06005324 pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
locke-lunarg36045992020-08-20 16:54:37 -06005325 if (use.second.samplers_used_by_image.size()) {
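            // Record which sampler slots each sampled image is used with; the sampler state pointers are resolved later, so store nullptr for now.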
locke-lunarg654a9052020-10-13 16:28:42 -06005326 auto &samplers_used_by_image = pipeline->active_slots[slot][use.first.second].samplers_used_by_image;
5327 if (use.second.samplers_used_by_image.size() > samplers_used_by_image.size()) {
5328 samplers_used_by_image.resize(use.second.samplers_used_by_image.size());
5329 }
locke-lunarg654a9052020-10-13 16:28:42 -06005330 uint32_t image_index = 0;
5331 for (const auto &samplers : use.second.samplers_used_by_image) {
5332 for (const auto &sampler : samplers) {
locke-lunargb8be8222020-10-20 00:34:37 -06005333 samplers_used_by_image[image_index].emplace(sampler, nullptr);
locke-lunarg654a9052020-10-13 16:28:42 -06005334 }
5335 ++image_index;
5336 }
locke-lunarg36045992020-08-20 16:54:37 -06005337 }
locke-lunargd556cc32019-09-17 01:21:23 -06005338 }
locke-lunarg78486832020-09-09 19:39:42 -06005339
locke-lunarg96dc9632020-06-10 17:22:18 -06005340 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
sfricke-samsung962cad92021-04-13 00:46:29 -07005341 pipeline->fragmentShader_writable_output_location_list = module->CollectWritableOutputLocationinFS(*pStage);
locke-lunarg96dc9632020-06-10 17:22:18 -06005342 }
locke-lunargd556cc32019-09-17 01:21:23 -06005343}
5344
sfricke-samsung70ad9ce2021-04-04 00:53:54 -07005345// Discussed in detail in https://github.com/KhronosGroup/Vulkan-Docs/issues/1081
5346// Internal discussion and CTS tests were written to prove that this does not need to be called after an incompatible vkCmdBindPipeline:
5347// "Binding a pipeline with a layout that is not compatible with the push constant layout does not disturb the push constant values"
5348//
5349// vkCmdBindDescriptorSets has nothing to do with push constants, so there is no need to call this after it either
5350//
5351// Part of this assumes that at draw/dispatch/traceRays/etc. time the app has a properly compatible layout bound, otherwise other VUs will be triggered
locke-lunargd556cc32019-09-17 01:21:23 -06005352void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
5353 if (cb_state == nullptr) {
5354 return;
5355 }
5356
5357 const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
5358 if (pipeline_layout_state == nullptr) {
5359 return;
5360 }
5361
5362 if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
5363 cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
5364 cb_state->push_constant_data.clear();
locke-lunargde3f0fa2020-09-10 11:55:31 -06005365 cb_state->push_constant_data_update.clear();
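        // Rebuild the per-stage shadow buffers: bytes below a range's offset are marked PC_Byte_Not_Set, and
        // bytes covered by a range start as PC_Byte_Not_Updated until vkCmdPushConstants writes them.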
locke-lunargd556cc32019-09-17 01:21:23 -06005366 uint32_t size_needed = 0;
John Zulauf79f06582021-02-27 18:38:39 -07005367 for (const auto &push_constant_range : *cb_state->push_constant_data_ranges) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06005368 auto size = push_constant_range.offset + push_constant_range.size;
5369 size_needed = std::max(size_needed, size);
5370
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005371 auto stage_flags = push_constant_range.stageFlags;
locke-lunargde3f0fa2020-09-10 11:55:31 -06005372 uint32_t bit_shift = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005373 while (stage_flags) {
5374 if (stage_flags & 1) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06005375 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
5376 const auto it = cb_state->push_constant_data_update.find(flag);
5377
5378 if (it != cb_state->push_constant_data_update.end()) {
5379 if (it->second.size() < push_constant_range.offset) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005380 it->second.resize(push_constant_range.offset, PC_Byte_Not_Set);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005381 }
5382 if (it->second.size() < size) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005383 it->second.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005384 }
5385 } else {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005386 std::vector<uint8_t> bytes;
5387 bytes.resize(push_constant_range.offset, PC_Byte_Not_Set);
5388 bytes.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005389 cb_state->push_constant_data_update[flag] = bytes;
5390 }
5391 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005392 stage_flags = stage_flags >> 1;
locke-lunargde3f0fa2020-09-10 11:55:31 -06005393 ++bit_shift;
5394 }
locke-lunargd556cc32019-09-17 01:21:23 -06005395 }
5396 cb_state->push_constant_data.resize(size_needed, 0);
5397 }
5398}
John Zulauf22b0fbe2019-10-15 06:26:16 -06005399
5400void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
5401 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
5402 VkResult result) {
5403 if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
5404 auto swapchain_state = GetSwapchainState(swapchain);
5405
5406 if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);
5407
5408 if (pSwapchainImages) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005409 for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
John Zulauf29d00532021-03-04 13:28:54 -07005410 SWAPCHAIN_IMAGE &swapchain_image = swapchain_state->images[i];
John Zulauffaa7a522021-03-05 12:22:45 -07005411 if (swapchain_image.image_state) continue; // Already retrieved this.
John Zulauf22b0fbe2019-10-15 06:26:16 -06005412
5413 // Add imageMap entries for each swapchain image
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06005414 auto image_ci = LvlInitStruct<VkImageCreateInfo>();
5415 image_ci.pNext = LvlFindInChain<VkImageFormatListCreateInfo>(swapchain_state->createInfo.pNext);
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -06005416 image_ci.flags = 0; // to be updated below
John Zulauf22b0fbe2019-10-15 06:26:16 -06005417 image_ci.imageType = VK_IMAGE_TYPE_2D;
5418 image_ci.format = swapchain_state->createInfo.imageFormat;
5419 image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
5420 image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
5421 image_ci.extent.depth = 1;
5422 image_ci.mipLevels = 1;
5423 image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
5424 image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
5425 image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
5426 image_ci.usage = swapchain_state->createInfo.imageUsage;
5427 image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
5428 image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
5429 image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
5430 image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
5431
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005432 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005433 image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005434 }
5435 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005436 image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005437 }
5438 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08005439 image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005440 }
John Zulauf22b0fbe2019-10-15 06:26:16 -06005441
locke-lunarg296a3c92020-03-25 01:04:29 -06005442 imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
John Zulauffaa7a522021-03-05 12:22:45 -07005443 auto *image_state = imageMap[pSwapchainImages[i]].get();
5444 assert(image_state);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005445 image_state->valid = false;
5446 image_state->create_from_swapchain = swapchain;
5447 image_state->bind_swapchain = swapchain;
5448 image_state->bind_swapchain_imageIndex = i;
Tony-LunarGe64e4fe2020-02-17 16:21:55 -07005449 image_state->is_swapchain_image = true;
John Zulauf29d00532021-03-04 13:28:54 -07005450
5451 // Since swapchain images can't be linear, we can create the encoder here, and SyncVal needs a fake_base_address
5452 image_state->fragment_encoder = std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(
5453 new subresource_adapter::ImageRangeEncoder(*image_state));
5454
            if (swapchain_image.bound_images.empty()) {
                // First time "bind" allocates
                image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
            } else {
                // All others reuse
                image_state->swapchain_fake_address = (*swapchain_image.bound_images.cbegin())->swapchain_fake_address;
                // Since there are others, need to update the aliasing information
                for (auto other_image : swapchain_image.bound_images) {
                    image_state->AddAliasingImage(other_image);
                }
            }

            swapchain_image.image_state = image_state;  // Don't move, it's already a reference to the imageMap
            swapchain_image.bound_images.emplace(image_state);

            AddImageStateProps(*image_state, device, physical_device);
        }
    }

    if (*pSwapchainImageCount) {
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}

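// Recording an acceleration structure copy makes the destination inherit the source's "built" status and build info, so later
// validation treats it as a complete acceleration structure; both objects are also linked to the command buffer as children
// unless command-buffer state tracking is disabled.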
void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
                                                                           const VkCopyAccelerationStructureInfoKHR *pInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfo->src);
        ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfo->dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info_khr = src_as_state->build_info_khr;
            if (!disabled[command_buffer_state]) {
                cb_state->AddChild(dst_as_state);
                cb_state->AddChild(src_as_state);
            }
        }
    }
}

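// The extended-dynamic-state setters below all perform the same bookkeeping: set the matching CBSTATUS_*_SET bit in
// cb_state->status to record that the value has been supplied dynamically on this command buffer, and clear that bit in
// cb_state->static_status so draw-time validation no longer treats it as coming from the bound pipeline's static state.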
void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_CULL_MODE_SET;
    cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_FRONT_FACE_SET;
    cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
                                                                     VkPrimitiveTopology primitiveTopology) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->primitiveTopology = primitiveTopology;
    cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
    cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
}

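// CmdSetViewportWithCountEXT tracks which viewport slots currently hold valid values: viewports [0, viewportCount) are marked
// valid in viewportWithCountMask and removed from the trashed mask (e.g. viewportCount == 3 yields bits == 0b111), and the
// viewport values themselves are cached in dynamicViewports for draw-time checks. The scissor variant below mirrors this.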
void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
                                                                     const VkViewport *pViewports) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    uint32_t bits = (1u << viewportCount) - 1u;
    cb_state->viewportWithCountMask |= bits;
    cb_state->trashedViewportMask &= ~bits;
    cb_state->viewportWithCountCount = viewportCount;
    cb_state->trashedViewportCount = false;
    cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
    cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;

    cb_state->dynamicViewports.resize(std::max(size_t(viewportCount), cb_state->dynamicViewports.size()));
    for (size_t i = 0; i < viewportCount; ++i) {
        cb_state->dynamicViewports[i] = pViewports[i];
    }
}

void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
                                                                    const VkRect2D *pScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    uint32_t bits = (1u << scissorCount) - 1u;
    cb_state->scissorWithCountMask |= bits;
    cb_state->trashedScissorMask &= ~bits;
    cb_state->scissorWithCountCount = scissorCount;
    cb_state->trashedScissorCount = false;
    cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
    cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
}

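// CmdBindVertexBuffers2EXT records the buffer, offset, size, and stride for each touched binding. pSizes and pStrides are
// optional: a missing size is recorded as VK_WHOLE_SIZE and a missing stride as 0 (the stride then comes from the bound
// pipeline's vertex input state). Supplying pStrides also marks the binding stride as dynamically set.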
void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                                   uint32_t bindingCount, const VkBuffer *pBuffers,
                                                                   const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
                                                                   const VkDeviceSize *pStrides) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (pStrides) {
        cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
        cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
    }

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
        vertex_buffer_binding.offset = pOffsets[i];
        vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
        vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
        // Add binding for this vertex buffer to this command buffer
        if (!disabled[command_buffer_state] && pBuffers[i]) {
            cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
                                                                         VkBool32 depthBoundsTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                             VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
                                                             VkCompareOp compareOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_OP_SET;
    cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
                                                                    uint32_t discardRectangleCount,
                                                                    const VkRect2D *pDiscardRectangles) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DISCARD_RECTANGLE_SET;
    cb_state->static_status &= ~CBSTATUS_DISCARD_RECTANGLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
                                                                   const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_SAMPLE_LOCATIONS_SET;
    cb_state->static_status &= ~CBSTATUS_SAMPLE_LOCATIONS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer,
                                                                    VkCoarseSampleOrderTypeNV sampleOrderType,
                                                                    uint32_t customSampleOrderCount,
                                                                    const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_COARSE_SAMPLE_ORDER_SET;
    cb_state->static_status &= ~CBSTATUS_COARSE_SAMPLE_ORDER_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer, uint32_t patchControlPoints) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_PATCH_CONTROL_POINTS_SET;
    cb_state->static_status &= ~CBSTATUS_PATCH_CONTROL_POINTS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LOGIC_OP_SET;
    cb_state->static_status &= ~CBSTATUS_LOGIC_OP_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetRasterizerDiscardEnableEXT(VkCommandBuffer commandBuffer,
                                                                           VkBool32 rasterizerDiscardEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBiasEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BIAS_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetPrimitiveRestartEnableEXT(VkCommandBuffer commandBuffer,
                                                                          VkBool32 primitiveRestartEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
}

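// VK_EXT_vertex_input_dynamic_state supplies the full vertex input description dynamically, which also covers the per-binding
// stride, so both the vertex-input and vertex-input-binding-stride status bits are flipped together here.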
void ValidationStateTracker::PreCallRecordCmdSetVertexInputEXT(
    VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount,
    const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount,
    const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET;
    cb_state->static_status &= ~(CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET);
}

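// Common bookkeeping shared by the core, KHR, and EXT GetBufferDeviceAddress entry points: cache the returned opaque device
// address on the buffer state and record it in buffer_address_map_ so GPU-AV and ray tracing validation can map an address
// back to the buffer that owns it.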
void ValidationStateTracker::RecordGetBufferDeviceAddress(const VkBufferDeviceAddressInfo *pInfo, VkDeviceAddress address) {
    BUFFER_STATE *buffer_state = GetBufferState(pInfo->buffer);
    if (buffer_state) {
        // address is used for GPU-AV and ray tracing buffer validation
        buffer_state->deviceAddress = address;
        buffer_address_map_.emplace(address, buffer_state);
    }
}

void ValidationStateTracker::PostCallRecordGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
                                                                  VkDeviceAddress address) {
    RecordGetBufferDeviceAddress(pInfo, address);
}

void ValidationStateTracker::PostCallRecordGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
                                                                     VkDeviceAddress address) {
    RecordGetBufferDeviceAddress(pInfo, address);
}

void ValidationStateTracker::PostCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
                                                                     VkDeviceAddress address) {
    RecordGetBufferDeviceAddress(pInfo, address);
}

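// Swapchain state creation is factored into this helper so a derived state tracker can supply its own SWAPCHAIN_NODE subclass
// (assuming it overrides this hook). A minimal sketch of such an override, with hypothetical class names not present in this
// file:
//
//     std::shared_ptr<SWAPCHAIN_NODE> MyStateTracker::CreateSwapchainState(const VkSwapchainCreateInfoKHR *create_info,
//                                                                          VkSwapchainKHR swapchain) {
//         return std::make_shared<MY_SWAPCHAIN_STATE>(create_info, swapchain);
//     }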
std::shared_ptr<SWAPCHAIN_NODE> ValidationStateTracker::CreateSwapchainState(const VkSwapchainCreateInfoKHR *create_info,
                                                                             VkSwapchainKHR swapchain) {
    return std::make_shared<SWAPCHAIN_NODE>(create_info, swapchain);
}