/* Copyright (c) 2015-2021 The Khronos Group Inc.
 * Copyright (c) 2015-2021 Valve Corporation
 * Copyright (c) 2015-2021 LunarG, Inc.
 * Copyright (C) 2015-2021 Google Inc.
 * Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 * Author: Tobias Hector <tobias.hector@amd.com>
 */

#include <algorithm>
#include <cmath>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"
#include "sync_utils.h"
#include "cmd_buffer_state.h"
#include "render_pass_state.h"

void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}

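// Helper for resolving the attachment list of a framebuffer. For imageless framebuffers
// (VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) the views are not baked into the framebuffer; the app supplies them at
// vkCmdBeginRenderPass time by chaining VkRenderPassAttachmentBeginInfo into VkRenderPassBeginInfo::pNext, e.g.
// (illustrative app-side sketch, not part of this layer):
//     VkRenderPassAttachmentBeginInfo attach_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, nullptr,
//                                                     attachment_count, attachment_views};
//     VkRenderPassBeginInfo rp_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, &attach_begin, ...};
// The helper below returns whichever attachment list applies.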
// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
// attachments won't persist past the API entry point exit.
static std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
                                                                          const FRAMEBUFFER_STATE &fb_state) {
    const VkImageView *attachments = fb_state.createInfo.pAttachments;
    uint32_t count = fb_state.createInfo.attachmentCount;
    if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
        const auto *framebuffer_attachments = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
        if (framebuffer_attachments) {
            attachments = framebuffer_attachments->pAttachments;
            count = framebuffer_attachments->attachmentCount;
        }
    }
    return std::make_pair(count, attachments);
}

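// Shared implementation for collecting the IMAGE_VIEW_STATE pointers of a render pass instance. The get_fn callback
// maps a VkImageView handle to whatever pointer type the caller wants (e.g. a shared_ptr from GetShared<> below), so
// the same loop serves both owning and non-owning lookups. Unused attachment slots stay nullptr.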
template <typename ImageViewPointer, typename Get>
std::vector<ImageViewPointer> GetAttachmentViewsImpl(const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state,
                                                     const Get &get_fn) {
    std::vector<ImageViewPointer> views;

    const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
    const auto attachment_count = count_attachment.first;
    const auto *attachments = count_attachment.second;
    views.resize(attachment_count, nullptr);
    for (uint32_t i = 0; i < attachment_count; i++) {
        if (attachments[i] != VK_NULL_HANDLE) {
            views[i] = get_fn(attachments[i]);
        }
    }
    return views;
}

std::vector<std::shared_ptr<const IMAGE_VIEW_STATE>> ValidationStateTracker::GetSharedAttachmentViews(
    const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state) const {
    auto get_fn = [this](VkImageView handle) { return this->GetShared<IMAGE_VIEW_STATE>(handle); };
    return GetAttachmentViewsImpl<std::shared_ptr<const IMAGE_VIEW_STATE>>(rp_begin, fb_state, get_fn);
}

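// Android AHardwareBuffer support: external formats have no VkFormat, so their format features are keyed by the
// driver-reported externalFormat value. ahb_ext_formats_map is filled in from
// vkGetAndroidHardwareBufferPropertiesANDROID results and consulted when state objects are created with a matching
// VkExternalFormatANDROID in their pNext chain.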
#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

template <typename CreateInfo>
VkFormatFeatureFlags ValidationStateTracker::GetExternalFormatFeaturesANDROID(const CreateInfo *create_info) const {
    VkFormatFeatureFlags format_features = 0;
    const VkExternalFormatANDROID *ext_fmt_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            format_features = it->second;
        }
    }
    return format_features;
}

void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto ahb_format_props = LvlFindInChain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
    if (ahb_format_props) {
        ahb_ext_formats_map.emplace(ahb_format_props->externalFormat, ahb_format_props->formatFeatures);
    }
}

#else

template <typename CreateInfo>
VkFormatFeatureFlags ValidationStateTracker::GetExternalFormatFeaturesANDROID(const CreateInfo *create_info) const {
    return 0;
}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

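// Looks up the format features that actually apply to an image. For DRM format modifier tiling the relevant features
// are the ones listed for the specific modifier the driver chose for this image, so the image's modifier is queried
// first and then matched against the per-modifier list returned by vkGetPhysicalDeviceFormatProperties2 (called twice:
// once for the count, once for the data). For linear/optimal tiling the plain VkFormatProperties fields are used.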
VkFormatFeatureFlags GetImageFormatFeatures(VkPhysicalDevice physical_device, VkDevice device, VkImage image, VkFormat format,
                                            VkImageTiling tiling) {
    VkFormatFeatureFlags format_features = 0;
    // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
    // if format is AHB external format then the features are already set
    if (tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                       nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image, &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
        std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
        drm_properties.resize(drm_properties_list.drmFormatModifierCount);
        drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);

        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
                format_features = drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                break;
            }
        }
    } else {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
        format_features =
            (tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures : format_properties.optimalTilingFeatures;
    }
    return format_features;
}

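// Create-time state capture for VkImage. Memory requirements are cached up front (per plane for disjoint multi-planar
// images) so later validation does not need to call back into the driver; AHB-backed external images are skipped
// because their requirements can only be queried after memory is bound.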
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkFormatFeatureFlags format_features = 0;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        format_features = GetExternalFormatFeaturesANDROID(pCreateInfo);
    }
    if (format_features == 0) {
        format_features = GetImageFormatFeatures(physical_device, device, *pImage, pCreateInfo->format, pCreateInfo->tiling);
    }

    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo, format_features);
    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->IsExternalAHB() == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements[0]);
        } else {
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least one plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->requirements[0] = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->requirements[1] = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->requirements[2] = mem_reqs2.memoryRequirements;
            }
        }
    }

    imageMap[*pImage] = std::move(is_node);
}

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    if (!image_state) return;

    image_state->Destroy();
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        cb_node->AddChild(image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        cb_node->AddChild(image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
                                                           const VkCopyImageInfo2KHR *pCopyImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pCopyImageInfo->srcImage);
    auto dst_image_state = GetImageState(pCopyImageInfo->dstImage);

    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
                                                              const VkResolveImageInfo2KHR *pResolveImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pResolveImageInfo->srcImage);
    auto dst_image_state = GetImageState(pResolveImageInfo->dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
                                                           const VkBlitImageInfo2KHR *pBlitImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pBlitImageInfo->srcImage);
    auto dst_image_state = GetImageState(pBlitImageInfo->dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

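// Create-time state capture for VkBuffer; the memory requirements are cached here so validation has them even if the
// app never calls vkGetBufferMemoryRequirements itself.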
void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;

    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    // Get the memory requirements now, in case the app does not query them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.emplace(*pBuffer, std::move(buffer_state));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;

    auto buffer_state = GetBufferShared(pCreateInfo->buffer);

    VkFormatProperties format_properties;
    DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);

    bufferViewMap[*pView] =
        std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo, format_properties.bufferFeatures);
}

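// Create-time state capture for VkImageView. A view can use a different VkFormat than its image (mutable-format
// images), so the view's format features are looked up separately, unless the image uses an AHB external format, in
// which case the view simply inherits the image's features.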
void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);

    VkFormatFeatureFlags format_features = 0;
    if (image_state->HasAHBFormat() == true) {
        // The ImageView uses the Image's format features since they share the same AHB
        format_features = image_state->format_features;
    } else {
        format_features = GetImageFormatFeatures(physical_device, device, image_state->image(), pCreateInfo->format,
                                                 image_state->createInfo.tiling);
    }

    // filter_cubic_props is used in CmdDraw validation, but querying it at CmdDraw time would be far too expensive,
    // so it is queried once here and cached on the view state.
    auto filter_cubic_props = LvlInitStruct<VkFilterCubicImageViewImageFormatPropertiesEXT>();
    if (IsExtEnabled(device_extensions.vk_ext_filter_cubic)) {
        auto imageview_format_info = LvlInitStruct<VkPhysicalDeviceImageViewImageFormatInfoEXT>();
        imageview_format_info.imageViewType = pCreateInfo->viewType;
        auto image_format_info = LvlInitStruct<VkPhysicalDeviceImageFormatInfo2>(&imageview_format_info);
        image_format_info.type = image_state->createInfo.imageType;
        image_format_info.format = image_state->createInfo.format;
        image_format_info.tiling = image_state->createInfo.tiling;
        auto usage_create_info = LvlFindInChain<VkImageViewUsageCreateInfo>(pCreateInfo->pNext);
        image_format_info.usage = usage_create_info ? usage_create_info->usage : image_state->createInfo.usage;
        image_format_info.flags = image_state->createInfo.flags;

        auto image_format_properties = LvlInitStruct<VkImageFormatProperties2>(&filter_cubic_props);

        DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &image_format_info, &image_format_properties);
    }

    imageViewMap[*pView] =
        std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo, format_features, filter_cubic_props);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
                                                            const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(pCopyBufferInfos->srcBuffer);
    auto dst_buffer_state = GetBufferState(pCopyBufferInfos->dstBuffer);

    // Update bindings between buffers and cmd buffer
    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;

    // Any bound cmd buffers are now invalid
    image_view_state->Destroy();
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);

    buffer_state->Destroy();
    bufferMap.erase(buffer_state->buffer());
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);

    // Any bound cmd buffers are now invalid
    buffer_view_state->Destroy();
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    cb_node->AddChild(buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
                                                                   const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pCopyImageToBufferInfo->srcImage);
    auto dst_buffer_state = GetBufferState(pCopyImageToBufferInfo->dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
                                                                   const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(pCopyBufferToImageInfo->srcBuffer);
    auto dst_image_state = GetImageState(pCopyBufferToImageInfo->dstImage);

    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_image_state);
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

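// GetObjectMemBinding below only handles the object types that can have memory bound to them (images, buffers, and
// NV acceleration structures); any other handle type yields nullptr.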
// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureStateNV(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

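// Called at draw/dispatch record time to attach the currently bound descriptor resources to the command buffer.
// For "bindless"-style sets with very many descriptors, PrefilterBindRequestMap reduces the work to the bindings the
// pipeline actually uses, and per-set change counters let unchanged sets be skipped entirely; when only new bindings
// appeared, std::set_difference records just the delta instead of re-walking the whole set.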
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
                                             const char *function) {
    const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
    auto &state = cb_state->lastBound[lv_bind_point];
    PIPELINE_STATE *pipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pipe->active_slots) {
            uint32_t set_index = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[set_index].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[set_index].validated_set != descriptor_set ||
                state.per_set[set_index].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[set_index].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[set_index].validated_set_binding_req_map.begin(),
                                              state.per_set[set_index].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[set_index].validated_set_binding_req_map.begin(),
                                        state.per_set[set_index].validated_set_binding_req_map.end(),
                                        layer_data::insert_iterator<BindingReqMap>(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, delta_reqs, function);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, binding_req_map, function);
                }

                state.per_set[set_index].validated_set = descriptor_set;
                state.per_set[set_index].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[set_index].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[set_index].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[set_index].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[set_index].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    if (!pipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    // Any bound cmd buffers are now invalid
    descriptor_set->Destroy();

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE: Calls to this function should be wrapped in a mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto *ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureStateNV(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureKHR: {
            base_ptr = GetAccelerationStructureStateKHR(object_struct.Cast<VkAccelerationStructureKHR>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

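// Unlike GetImageFormatFeatures() above, this helper does not know which tiling or modifier a resource will end up
// using, so it ORs together every tiling's features (linear, optimal, and all DRM format modifiers) to produce the
// "potential" feature set.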
// Gets the union of all features defined by Potential Format Features,
// except it does not handle the external format case for AHB, as that can only be used for sampled images
VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
    VkFormatFeatureFlags format_features = 0;

    if (format != VK_FORMAT_UNDEFINED) {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
        format_features |= format_properties.linearTilingFeatures;
        format_features |= format_properties.optimalTilingFeatures;
        if (device_extensions.vk_ext_image_drm_format_modifier) {
            // VK_KHR_get_physical_device_properties2 is required in this case
            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;

            // First call is to get the number of modifiers compatible with the queried format
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);

            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
            drm_properties.resize(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();

            // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
            // compatible with the queried format
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);

            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
            }
        }
    }

    return format_features;
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *cb_state = GetCBState(cb);
    if (cb_state) {
        cb_state->Reset();
        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
    }

    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

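// Device-creation state capture: records which features the app enabled so later validation can check against them.
// Features can arrive either in pCreateInfo->pEnabledFeatures or via structs chained into pCreateInfo->pNext, e.g.
// (illustrative app-side sketch, not part of this layer):
//     VkPhysicalDeviceVulkan12Features features12 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES};
//     features12.timelineSemaphore = VK_TRUE;
//     VkDeviceCreateInfo create_info = {VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, &features12, ...};
// When VkPhysicalDeviceVulkan12Features is absent, the equivalent per-extension feature structs are folded into
// enabled_features.core12 instead.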
void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = LvlFindInChain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);

            const auto *provoking_vertex_features = lvl_find_in_chain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(features2->pNext);
            if (provoking_vertex_features) {
                state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
            }
        }
    }

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously set them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *vulkan_12_features = LvlFindInChain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
    if (vulkan_12_features) {
        state_tracker->enabled_features.core12 = *vulkan_12_features;
    } else {
        // Set Extension Feature Aliases to false as there is no struct to check
        state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
        state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
        state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
        state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
        state_tracker->enabled_features.core12.subgroupBroadcastDynamicId = VK_FALSE;

        // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features

        const auto *eight_bit_storage_features = LvlFindInChain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
        if (eight_bit_storage_features) {
            state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
            state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
                eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
            state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
        }

        const auto *float16_int8_features = LvlFindInChain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
        if (float16_int8_features) {
            state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
            state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
        }

        const auto *descriptor_indexing_features = LvlFindInChain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
        if (descriptor_indexing_features) {
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
                descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
            state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
                descriptor_indexing_features->descriptorBindingPartiallyBound;
            state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
                descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
            state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
        }

        const auto *scalar_block_layout_features = LvlFindInChain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
        if (scalar_block_layout_features) {
            state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
        }

        const auto *imageless_framebuffer_features =
            LvlFindInChain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
        if (imageless_framebuffer_features) {
            state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
        }

        const auto *uniform_buffer_standard_layout_features =
            LvlFindInChain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
        if (uniform_buffer_standard_layout_features) {
            state_tracker->enabled_features.core12.uniformBufferStandardLayout =
                uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
        }

        const auto *subgroup_extended_types_features =
            LvlFindInChain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
        if (subgroup_extended_types_features) {
            state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
                subgroup_extended_types_features->shaderSubgroupExtendedTypes;
        }

        const auto *separate_depth_stencil_layouts_features =
            LvlFindInChain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
        if (separate_depth_stencil_layouts_features) {
            state_tracker->enabled_features.core12.separateDepthStencilLayouts =
                separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
        }

        const auto *host_query_reset_features = LvlFindInChain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
        if (host_query_reset_features) {
            state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
        }

        const auto *timeline_semaphore_features = LvlFindInChain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
        if (timeline_semaphore_features) {
            state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
        }

        const auto *buffer_device_address = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
        if (buffer_device_address) {
            state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
            state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
                buffer_device_address->bufferDeviceAddressCaptureReplay;
            state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
                buffer_device_address->bufferDeviceAddressMultiDevice;
        }

        const auto *atomic_int64_features = LvlFindInChain<VkPhysicalDeviceShaderAtomicInt64Features>(pCreateInfo->pNext);
        if (atomic_int64_features) {
            state_tracker->enabled_features.core12.shaderBufferInt64Atomics = atomic_int64_features->shaderBufferInt64Atomics;
            state_tracker->enabled_features.core12.shaderSharedInt64Atomics = atomic_int64_features->shaderSharedInt64Atomics;
        }

        const auto *memory_model_features = LvlFindInChain<VkPhysicalDeviceVulkanMemoryModelFeatures>(pCreateInfo->pNext);
        if (memory_model_features) {
            state_tracker->enabled_features.core12.vulkanMemoryModel = memory_model_features->vulkanMemoryModel;
            state_tracker->enabled_features.core12.vulkanMemoryModelDeviceScope =
                memory_model_features->vulkanMemoryModelDeviceScope;
            state_tracker->enabled_features.core12.vulkanMemoryModelAvailabilityVisibilityChains =
971 memory_model_features->vulkanMemoryModelAvailabilityVisibilityChains;
972 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700973 }
974
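    // Vulkan 1.1 features: take the aggregate VkPhysicalDeviceVulkan11Features when it is chained, otherwise fall
    // back to the individual promoted-extension feature structs handled below.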
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700975 const auto *vulkan_11_features = LvlFindInChain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700976 if (vulkan_11_features) {
977 state_tracker->enabled_features.core11 = *vulkan_11_features;
978 } else {
979         // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan11Features
980
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700981 const auto *sixteen_bit_storage_features = LvlFindInChain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700982 if (sixteen_bit_storage_features) {
983 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
984 sixteen_bit_storage_features->storageBuffer16BitAccess;
985 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
986 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
987 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
988 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
989 }
990
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700991 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700992 if (multiview_features) {
993 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
994 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
995 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
996 }
997
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700998 const auto *variable_pointers_features = LvlFindInChain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700999 if (variable_pointers_features) {
1000 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1001 variable_pointers_features->variablePointersStorageBuffer;
1002 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1003 }
1004
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001005 const auto *protected_memory_features = LvlFindInChain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001006 if (protected_memory_features) {
1007 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1008 }
1009
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001010 const auto *ycbcr_conversion_features = LvlFindInChain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001011 if (ycbcr_conversion_features) {
1012 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1013 }
1014
1015 const auto *shader_draw_parameters_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001016 LvlFindInChain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001017 if (shader_draw_parameters_features) {
1018 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001019 }
1020 }
1021
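    // Device group creation: record how many physical devices back this logical device (defaults to 1 when no
    // VkDeviceGroupDeviceCreateInfo is chained).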
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001022 const auto *device_group_ci = LvlFindInChain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001023 if (device_group_ci) {
1024 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1025 state_tracker->device_group_create_info = *device_group_ci;
1026 } else {
1027 state_tracker->physical_device_count = 1;
1028 }
locke-lunargd556cc32019-09-17 01:21:23 -06001029
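    // The remaining feature structs are extension-specific; each one found in the VkDeviceCreateInfo pNext chain is
    // copied wholesale into enabled_features. Illustrative application-side enablement (a sketch only, not part of
    // this file, assuming the extension has been queried and is supported):
    //     VkPhysicalDeviceExclusiveScissorFeaturesNV exclusive_scissor = {
    //         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV};
    //     exclusive_scissor.exclusiveScissor = VK_TRUE;
    //     device_create_info.pNext = &exclusive_scissor;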
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001030 const auto *exclusive_scissor_features = LvlFindInChain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001031 if (exclusive_scissor_features) {
1032 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1033 }
1034
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001035 const auto *shading_rate_image_features = LvlFindInChain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001036 if (shading_rate_image_features) {
1037 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1038 }
1039
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001040 const auto *mesh_shader_features = LvlFindInChain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001041 if (mesh_shader_features) {
1042 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1043 }
1044
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001045 const auto *inline_uniform_block_features = LvlFindInChain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001046 if (inline_uniform_block_features) {
1047 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1048 }
1049
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001050 const auto *transform_feedback_features = LvlFindInChain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001051 if (transform_feedback_features) {
1052 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1053 }
1054
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001055 const auto *vtx_attrib_div_features = LvlFindInChain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001056 if (vtx_attrib_div_features) {
1057 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1058 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001059
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001060 const auto *buffer_device_address_ext = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001061 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001062 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001063 }
1064
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001065 const auto *cooperative_matrix_features = LvlFindInChain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001066 if (cooperative_matrix_features) {
1067 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1068 }
1069
locke-lunargd556cc32019-09-17 01:21:23 -06001070 const auto *compute_shader_derivatives_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001071 LvlFindInChain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001072 if (compute_shader_derivatives_features) {
1073 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1074 }
1075
1076 const auto *fragment_shader_barycentric_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001077 LvlFindInChain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001078 if (fragment_shader_barycentric_features) {
1079 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1080 }
1081
1082 const auto *shader_image_footprint_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001083 LvlFindInChain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001084 if (shader_image_footprint_features) {
1085 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1086 }
1087
1088 const auto *fragment_shader_interlock_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001089 LvlFindInChain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001090 if (fragment_shader_interlock_features) {
1091 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1092 }
1093
1094 const auto *demote_to_helper_invocation_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001095 LvlFindInChain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001096 if (demote_to_helper_invocation_features) {
1097 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1098 }
1099
1100 const auto *texel_buffer_alignment_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001101 LvlFindInChain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001102 if (texel_buffer_alignment_features) {
1103 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1104 }
1105
locke-lunargd556cc32019-09-17 01:21:23 -06001106 const auto *pipeline_exe_props_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001107 LvlFindInChain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001108 if (pipeline_exe_props_features) {
1109 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1110 }
1111
Jeff Bolz82f854d2019-09-17 14:56:47 -05001112 const auto *dedicated_allocation_image_aliasing_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001113 LvlFindInChain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
Jeff Bolz82f854d2019-09-17 14:56:47 -05001114 if (dedicated_allocation_image_aliasing_features) {
1115 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1116 *dedicated_allocation_image_aliasing_features;
1117 }
1118
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001119 const auto *performance_query_features = LvlFindInChain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001120 if (performance_query_features) {
1121 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1122 }
1123
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001124 const auto *device_coherent_memory_features = LvlFindInChain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
Tobias Hector782bcde2019-11-28 16:19:42 +00001125 if (device_coherent_memory_features) {
1126 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1127 }
1128
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001129 const auto *ycbcr_image_array_features = LvlFindInChain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungcead0802020-01-30 22:20:10 -08001130 if (ycbcr_image_array_features) {
1131 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1132 }
1133
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001134 const auto *ray_query_features = LvlFindInChain<VkPhysicalDeviceRayQueryFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001135 if (ray_query_features) {
1136 state_tracker->enabled_features.ray_query_features = *ray_query_features;
1137 }
1138
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001139 const auto *ray_tracing_pipeline_features = LvlFindInChain<VkPhysicalDeviceRayTracingPipelineFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001140 if (ray_tracing_pipeline_features) {
1141 state_tracker->enabled_features.ray_tracing_pipeline_features = *ray_tracing_pipeline_features;
1142 }
1143
1144 const auto *ray_tracing_acceleration_structure_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001145 LvlFindInChain<VkPhysicalDeviceAccelerationStructureFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001146 if (ray_tracing_acceleration_structure_features) {
1147 state_tracker->enabled_features.ray_tracing_acceleration_structure_features = *ray_tracing_acceleration_structure_features;
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001148 }
1149
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001150 const auto *robustness2_features = LvlFindInChain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz165818a2020-05-08 11:19:03 -05001151 if (robustness2_features) {
1152 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1153 }
1154
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001155 const auto *fragment_density_map_features = LvlFindInChain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001156 if (fragment_density_map_features) {
1157 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1158 }
1159
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001160 const auto *fragment_density_map_features2 = LvlFindInChain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001161 if (fragment_density_map_features2) {
1162 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1163 }
1164
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001165 const auto *astc_decode_features = LvlFindInChain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001166 if (astc_decode_features) {
1167 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1168 }
1169
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001170 const auto *custom_border_color_features = LvlFindInChain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
Tony-LunarG7337b312020-04-15 16:40:25 -06001171 if (custom_border_color_features) {
1172 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1173 }
1174
sfricke-samsungfd661d62020-05-16 00:57:27 -07001175 const auto *pipeline_creation_cache_control_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001176 LvlFindInChain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungfd661d62020-05-16 00:57:27 -07001177 if (pipeline_creation_cache_control_features) {
1178 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1179 }
1180
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001181 const auto *fragment_shading_rate_features = LvlFindInChain<VkPhysicalDeviceFragmentShadingRateFeaturesKHR>(pCreateInfo->pNext);
Tobias Hector6663c9b2020-11-05 10:18:02 +00001182 if (fragment_shading_rate_features) {
1183 state_tracker->enabled_features.fragment_shading_rate_features = *fragment_shading_rate_features;
1184 }
1185
Piers Daniell39842ee2020-07-10 16:42:33 -06001186 const auto *extended_dynamic_state_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001187 LvlFindInChain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
Piers Daniell39842ee2020-07-10 16:42:33 -06001188 if (extended_dynamic_state_features) {
1189 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1190 }
1191
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -07001192 const auto *extended_dynamic_state2_features =
1193 LvlFindInChain<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT>(pCreateInfo->pNext);
1194 if (extended_dynamic_state2_features) {
1195 state_tracker->enabled_features.extended_dynamic_state2_features = *extended_dynamic_state2_features;
1196 }
1197
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001198 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001199 if (multiview_features) {
1200 state_tracker->enabled_features.multiview_features = *multiview_features;
1201 }
1202
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001203 const auto *portability_features = LvlFindInChain<VkPhysicalDevicePortabilitySubsetFeaturesKHR>(pCreateInfo->pNext);
Nathaniel Cesariob3f2d702020-11-09 09:20:49 -07001204 if (portability_features) {
1205 state_tracker->enabled_features.portability_subset_features = *portability_features;
1206 }
1207
sfricke-samsung0065ce02020-12-03 22:46:37 -08001208 const auto *shader_integer_functions2_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001209 LvlFindInChain<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001210 if (shader_integer_functions2_features) {
1211 state_tracker->enabled_features.shader_integer_functions2_features = *shader_integer_functions2_features;
1212 }
1213
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001214 const auto *shader_sm_builtins_feature = LvlFindInChain<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001215 if (shader_sm_builtins_feature) {
1216 state_tracker->enabled_features.shader_sm_builtins_feature = *shader_sm_builtins_feature;
1217 }
1218
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001219 const auto *shader_atomic_float_feature = LvlFindInChain<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001220 if (shader_atomic_float_feature) {
1221 state_tracker->enabled_features.shader_atomic_float_feature = *shader_atomic_float_feature;
1222 }
1223
1224 const auto *shader_image_atomic_int64_feature =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001225 LvlFindInChain<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001226 if (shader_image_atomic_int64_feature) {
1227 state_tracker->enabled_features.shader_image_atomic_int64_feature = *shader_image_atomic_int64_feature;
1228 }
1229
sfricke-samsung486a51e2021-01-02 00:10:15 -08001230 const auto *shader_clock_feature = LvlFindInChain<VkPhysicalDeviceShaderClockFeaturesKHR>(pCreateInfo->pNext);
1231 if (shader_clock_feature) {
1232 state_tracker->enabled_features.shader_clock_feature = *shader_clock_feature;
1233 }
1234
Jeremy Gebben5f585ae2021-02-02 09:03:06 -07001235 const auto *conditional_rendering_features =
1236 LvlFindInChain<VkPhysicalDeviceConditionalRenderingFeaturesEXT>(pCreateInfo->pNext);
1237 if (conditional_rendering_features) {
1238 state_tracker->enabled_features.conditional_rendering = *conditional_rendering_features;
1239 }
1240
Shannon McPhersondb287d42021-02-02 15:27:32 -07001241 const auto *workgroup_memory_explicit_layout_features =
1242 LvlFindInChain<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>(pCreateInfo->pNext);
1243 if (workgroup_memory_explicit_layout_features) {
1244 state_tracker->enabled_features.workgroup_memory_explicit_layout_features = *workgroup_memory_explicit_layout_features;
1245 }
1246
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001247 const auto *synchronization2_features =
1248 LvlFindInChain<VkPhysicalDeviceSynchronization2FeaturesKHR>(pCreateInfo->pNext);
1249 if (synchronization2_features) {
1250 state_tracker->enabled_features.synchronization2_features = *synchronization2_features;
1251 }
1252
Locke Linf3873542021-04-26 11:25:10 -06001253     const auto *provoking_vertex_features = LvlFindInChain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(pCreateInfo->pNext);
1254 if (provoking_vertex_features) {
1255 state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
1256 }
1257
Piers Daniellcb6d8032021-04-19 18:51:26 -06001258 const auto *vertex_input_dynamic_state_features =
1259 LvlFindInChain<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1260 if (vertex_input_dynamic_state_features) {
1261 state_tracker->enabled_features.vertex_input_dynamic_state_features = *vertex_input_dynamic_state_features;
1262 }
1263
David Zhao Akeley44139b12021-04-26 16:16:13 -07001264 const auto *inherited_viewport_scissor_features =
1265 LvlFindInChain<VkPhysicalDeviceInheritedViewportScissorFeaturesNV>(pCreateInfo->pNext);
1266 if (inherited_viewport_scissor_features) {
1267 state_tracker->enabled_features.inherited_viewport_scissor_features = *inherited_viewport_scissor_features;
1268 }
1269
Tony-LunarG4490de42021-06-21 15:49:19 -06001270 const auto *multi_draw_features = LvlFindInChain<VkPhysicalDeviceMultiDrawFeaturesEXT>(pCreateInfo->pNext);
1271 if (multi_draw_features) {
1272 state_tracker->enabled_features.multi_draw_features = *multi_draw_features;
1273 }
1274
locke-lunargd556cc32019-09-17 01:21:23 -06001275     // Store physical device properties and physical device memory limits into the state tracker structs
1276 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1277 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001278 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1279 &state_tracker->phys_dev_props_core11);
1280 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1281 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001282
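    // Query extension-specific properties. For extensions promoted to Vulkan 1.2, the results are mirrored into
    // phys_dev_props_core11 / phys_dev_props_core12 below when the device does not enable the 1.2 feature set.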
1283 const auto &dev_ext = state_tracker->device_extensions;
1284 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1285
1286 if (dev_ext.vk_khr_push_descriptor) {
1287 // Get the needed push_descriptor limits
1288 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1289 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1290 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1291 }
1292
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001293 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001294 VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing_prop;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001295 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1296 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1297 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1298 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1299 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1300 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1301 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1302 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1303 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1304 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1305 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1306 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1307 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1308 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1309 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1310 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1311 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1312 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1313 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1314 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1315 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1316 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1317 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1318 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1319 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1320 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1321 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1322 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1323 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1324 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1325 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1326 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1327 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1328 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1329 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1330 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1331 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1332 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1333 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1334 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
1335 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
1336 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
1337 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
1338 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
1339 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
1340 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
1341 }
1342
locke-lunargd556cc32019-09-17 01:21:23 -06001343 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1344 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1345 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1346 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001347
1348 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001349 VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001350 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
1351 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
1352 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
1353 depth_stencil_resolve_props.supportedStencilResolveModes;
1354 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
1355 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
1356 }
1357
locke-lunargd556cc32019-09-17 01:21:23 -06001358 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001359 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
sourav parmarcd5fb182020-07-17 12:58:44 -07001360 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing_pipeline, &phys_dev_props->ray_tracing_propsKHR);
1361 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_acceleration_structure, &phys_dev_props->acc_structure_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001362 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1363 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001364 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001365 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07001366 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06001367 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001368 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_multiview, &phys_dev_props->multiview_props);
Nathaniel Cesario3291c912020-11-17 16:54:41 -07001369 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_portability_subset, &phys_dev_props->portability_props);
Locke Lin016d8482021-05-27 12:11:31 -06001370 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_provoking_vertex, &phys_dev_props->provoking_vertex_props);
Tony-LunarG4490de42021-06-21 15:49:19 -06001371 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_multi_draw, &phys_dev_props->multi_draw_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001372
1373 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001374 VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001375 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
1376 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
1377 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
1378 }
1379
1380 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001381 VkPhysicalDeviceFloatControlsProperties float_controls_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001382 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
1383 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
1384 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
1385 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
1386 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
1387 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
1388 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
1389 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
1390 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
1391 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
1392 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
1393 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
1394 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
1395 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
1396 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
1397 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
1398 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
1399 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
1400 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
1401 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
1402 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
1403 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001404
locke-lunargd556cc32019-09-17 01:21:23 -06001405 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1406 // Get the needed cooperative_matrix properties
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001407 auto cooperative_matrix_props = LvlInitStruct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1408 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&cooperative_matrix_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001409 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1410 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1411
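        // Standard two-call enumeration: query the property count, size the vector, then fetch the full list.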
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001412 uint32_t num_cooperative_matrix_properties = 0;
1413 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties, NULL);
1414 state_tracker->cooperative_matrix_properties.resize(num_cooperative_matrix_properties,
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001415 LvlInitStruct<VkCooperativeMatrixPropertiesNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06001416
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001417 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties,
locke-lunargd556cc32019-09-17 01:21:23 -06001418 state_tracker->cooperative_matrix_properties.data());
1419 }
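    // For Vulkan 1.1 devices that do not enable the 1.2 feature set, query subgroup limits directly and copy them
    // into phys_dev_props_core11.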
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001420 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06001421 // Get the needed subgroup limits
Locke Lin016d8482021-05-27 12:11:31 -06001422 auto subgroup_prop = LvlInitStruct<VkPhysicalDeviceSubgroupProperties>();
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001423 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&subgroup_prop);
locke-lunargd556cc32019-09-17 01:21:23 -06001424 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1425
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001426 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
1427 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
1428 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
1429 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06001430 }
1431
Tobias Hector6663c9b2020-11-05 10:18:02 +00001432 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_fragment_shading_rate, &phys_dev_props->fragment_shading_rate_props);
1433
locke-lunargd556cc32019-09-17 01:21:23 -06001434 // Store queue family data
1435 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1436 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001437 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
sfricke-samsungb585ec12021-05-06 03:10:13 -07001438 state_tracker->queue_family_index_set.insert(queue_create_info.queueFamilyIndex);
1439 state_tracker->device_queue_info_list.push_back(
1440 {i, queue_create_info.queueFamilyIndex, queue_create_info.flags, queue_create_info.queueCount});
locke-lunargd556cc32019-09-17 01:21:23 -06001441 }
1442 }
1443}
1444
1445void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
1446 if (!device) return;
1447
locke-lunargd556cc32019-09-17 01:21:23 -06001448 // Reset all command buffers before destroying them, to unlink object_bindings.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001449 for (auto &command_buffer : commandBufferMap) {
1450 ResetCommandBufferState(command_buffer.first);
locke-lunargd556cc32019-09-17 01:21:23 -06001451 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001452 pipelineMap.clear();
1453 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001454 commandBufferMap.clear();
1455
1456 // This will also delete all sets in the pool & remove them from setMap
1457 DeleteDescriptorSetPools();
1458 // All sets should be removed
1459 assert(setMap.empty());
1460 descriptorSetLayoutMap.clear();
1461 imageViewMap.clear();
1462 imageMap.clear();
1463 bufferViewMap.clear();
1464 bufferMap.clear();
1465 // Queues persist until device is destroyed
1466 queueMap.clear();
1467}
1468
locke-lunargd556cc32019-09-17 01:21:23 -06001469// Track which resources are in-flight by atomically incrementing their "in_use" count
1470void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1471 cb_node->submitCount++;
locke-lunargd556cc32019-09-17 01:21:23 -06001472
locke-lunargd556cc32019-09-17 01:21:23 -06001473     // TODO : We should be able to remove the NULL look-up checks from the code below once
 1474     // all the corresponding cases are verified to set CB_INVALID state; that state should
 1475     // then be flagged prior to calling this function
1476 for (auto event : cb_node->writeEventsBeforeWait) {
1477 auto event_state = GetEventState(event);
1478 if (event_state) event_state->write_in_use++;
1479 }
1480}
1481
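// Retire every submission on pQueue with a sequence number below 'seq': release semaphore and command-buffer use
// counts, propagate deferred query state, mark the associated fence retired, and roll forward any other queues or
// timeline semaphores that this queue waited on.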
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001482void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
Jeremy Gebbencbf22862021-03-03 12:01:22 -07001483 layer_data::unordered_map<VkQueue, uint64_t> other_queue_seqs;
1484 layer_data::unordered_map<VkSemaphore, uint64_t> timeline_semaphore_counters;
locke-lunargd556cc32019-09-17 01:21:23 -06001485
1486 // Roll this queue forward, one submission at a time.
1487 while (pQueue->seq < seq) {
1488 auto &submission = pQueue->submissions.front();
1489
1490 for (auto &wait : submission.waitSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001491 auto semaphore_state = GetSemaphoreState(wait.semaphore);
1492 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001493 semaphore_state->EndUse();
locke-lunargd556cc32019-09-17 01:21:23 -06001494 }
Mike Schuchardt2df08912020-12-15 16:28:09 -08001495 if (wait.type == VK_SEMAPHORE_TYPE_TIMELINE) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001496 auto &last_counter = timeline_semaphore_counters[wait.semaphore];
1497 last_counter = std::max(last_counter, wait.payload);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001498 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001499 auto &last_seq = other_queue_seqs[wait.queue];
1500 last_seq = std::max(last_seq, wait.seq);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001501 }
locke-lunargd556cc32019-09-17 01:21:23 -06001502 }
1503
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001504 for (auto &signal : submission.signalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001505 auto semaphore_state = GetSemaphoreState(signal.semaphore);
1506 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001507 semaphore_state->EndUse();
Mike Schuchardt2df08912020-12-15 16:28:09 -08001508 if (semaphore_state->type == VK_SEMAPHORE_TYPE_TIMELINE && semaphore_state->payload < signal.payload) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001509 semaphore_state->payload = signal.payload;
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001510 }
locke-lunargd556cc32019-09-17 01:21:23 -06001511 }
1512 }
1513
1514 for (auto &semaphore : submission.externalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001515 auto semaphore_state = GetSemaphoreState(semaphore);
1516 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001517 semaphore_state->EndUse();
locke-lunargd556cc32019-09-17 01:21:23 -06001518 }
1519 }
1520
1521 for (auto cb : submission.cbs) {
1522 auto cb_node = GetCBState(cb);
1523 if (!cb_node) {
1524 continue;
1525 }
1526 // First perform decrement on general case bound objects
locke-lunargd556cc32019-09-17 01:21:23 -06001527 for (auto event : cb_node->writeEventsBeforeWait) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001528 auto event_node = eventMap.find(event);
1529 if (event_node != eventMap.end()) {
John Zulauf48057322020-12-02 11:59:31 -07001530 event_node->second->write_in_use--;
locke-lunargd556cc32019-09-17 01:21:23 -06001531 }
1532 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001533 QueryMap local_query_to_state_map;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001534 VkQueryPool first_pool = VK_NULL_HANDLE;
Jeff Bolz310775c2019-10-09 00:46:33 -05001535 for (auto &function : cb_node->queryUpdates) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001536 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
Jeff Bolz310775c2019-10-09 00:46:33 -05001537 }
1538
John Zulauf79f06582021-02-27 18:38:39 -07001539 for (const auto &query_state_pair : local_query_to_state_map) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001540 if (query_state_pair.second == QUERYSTATE_ENDED) {
1541 queryToStateMap[query_state_pair.first] = QUERYSTATE_AVAILABLE;
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001542 }
locke-lunargd556cc32019-09-17 01:21:23 -06001543 }
Jeremy Gebben5570abe2021-05-16 18:35:13 -06001544 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
1545 cb_node->EndUse();
1546 }
locke-lunargd556cc32019-09-17 01:21:23 -06001547 }
1548
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001549 auto fence_state = GetFenceState(submission.fence);
1550 if (fence_state && fence_state->scope == kSyncScopeInternal) {
1551 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06001552 }
1553
1554 pQueue->submissions.pop_front();
1555 pQueue->seq++;
1556 }
1557
1558 // Roll other queues forward to the highest seq we saw a wait for
John Zulauf79f06582021-02-27 18:38:39 -07001559 for (const auto &qs : other_queue_seqs) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001560 RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001561 }
John Zulauf79f06582021-02-27 18:38:39 -07001562 for (const auto &sc : timeline_semaphore_counters) {
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001563 RetireTimelineSemaphore(sc.first, sc.second);
1564 }
locke-lunargd556cc32019-09-17 01:21:23 -06001565}
1566
1567// Submit a fence to a queue, delimiting previous fences and previous untracked
1568// work by it.
1569static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1570 pFence->state = FENCE_INFLIGHT;
1571 pFence->signaler.first = pQueue->queue;
1572 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1573}
1574
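// Record the fence for a queue submission. Returns a non-zero sequence number when the fence has external scope, in
// which case the caller retires work up to that point immediately because the corresponding wait will not be seen.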
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001575uint64_t ValidationStateTracker::RecordSubmitFence(QUEUE_STATE *queue_state, VkFence fence, uint32_t submit_count) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001576 auto fence_state = GetFenceState(fence);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001577 uint64_t early_retire_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001578 if (fence_state) {
1579 if (fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06001580 // Mark fence in use
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001581 SubmitFence(queue_state, fence_state, std::max(1u, submit_count));
1582 if (!submit_count) {
locke-lunargd556cc32019-09-17 01:21:23 -06001583 // If no submissions, but just dropping a fence on the end of the queue,
1584 // record an empty submission with just the fence, so we can determine
1585 // its completion.
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001586 CB_SUBMISSION submission;
1587 submission.fence = fence;
1588 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001589 }
1590 } else {
1591 // Retire work up until this fence early, we will not see the wait that corresponds to this signal
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001592 early_retire_seq = queue_state->seq + queue_state->submissions.size();
locke-lunargd556cc32019-09-17 01:21:23 -06001593 }
1594 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001595 return early_retire_seq;
1596}
1597
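// Add a command buffer (and any linked secondary command buffers) to the submission, increment their in-flight
// counts, and replay the query and event updates that were deferred at record time.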
1598void ValidationStateTracker::RecordSubmitCommandBuffer(CB_SUBMISSION &submission, VkCommandBuffer command_buffer) {
1599 auto cb_node = GetCBState(command_buffer);
1600 if (cb_node) {
1601 submission.cbs.push_back(command_buffer);
John Zulauf79f06582021-02-27 18:38:39 -07001602 for (auto *secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06001603 submission.cbs.push_back(secondary_cmd_buffer->commandBuffer());
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001604 IncrementResources(secondary_cmd_buffer);
1605 }
1606 IncrementResources(cb_node);
Jeremy Gebben5570abe2021-05-16 18:35:13 -06001607 // increment use count for all bound objects including secondary cbs
1608 cb_node->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001609
1610 VkQueryPool first_pool = VK_NULL_HANDLE;
1611 EventToStageMap local_event_to_stage_map;
1612 QueryMap local_query_to_state_map;
1613 for (auto &function : cb_node->queryUpdates) {
1614 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
1615 }
1616
John Zulauf79f06582021-02-27 18:38:39 -07001617 for (const auto &query_state_pair : local_query_to_state_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001618 queryToStateMap[query_state_pair.first] = query_state_pair.second;
1619 }
1620
John Zulauf79f06582021-02-27 18:38:39 -07001621 for (const auto &function : cb_node->eventUpdates) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001622 function(nullptr, /*do_validate*/ false, &local_event_to_stage_map);
1623 }
1624
John Zulauf79f06582021-02-27 18:38:39 -07001625 for (const auto &eventStagePair : local_event_to_stage_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001626 eventMap[eventStagePair.first]->stageMask = eventStagePair.second;
1627 }
1628 }
1629}
1630
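// Track a semaphore wait for this submission. Binary semaphores wait on their recorded signaler; timeline semaphores
// only add a wait when the requested payload value has not yet been reached. Externally scoped semaphores are tracked
// in a separate list.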
1631void ValidationStateTracker::RecordSubmitWaitSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
1632 uint64_t value, uint64_t next_seq) {
1633 auto semaphore_state = GetSemaphoreState(semaphore);
1634 if (semaphore_state) {
1635 if (semaphore_state->scope == kSyncScopeInternal) {
1636 SEMAPHORE_WAIT wait;
1637 wait.semaphore = semaphore;
1638 wait.type = semaphore_state->type;
1639 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
1640 if (semaphore_state->signaler.first != VK_NULL_HANDLE) {
1641 wait.queue = semaphore_state->signaler.first;
1642 wait.seq = semaphore_state->signaler.second;
1643 submission.waitSemaphores.emplace_back(std::move(wait));
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001644 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001645 }
1646 semaphore_state->signaler.first = VK_NULL_HANDLE;
1647 semaphore_state->signaled = false;
1648 } else if (semaphore_state->payload < value) {
1649 wait.queue = queue;
1650 wait.seq = next_seq;
1651 wait.payload = value;
1652 submission.waitSemaphores.emplace_back(std::move(wait));
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001653 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001654 }
1655 } else {
1656 submission.externalSemaphores.push_back(semaphore);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001657 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001658 if (semaphore_state->scope == kSyncScopeExternalTemporary) {
1659 semaphore_state->scope = kSyncScopeInternal;
1660 }
1661 }
1662 }
1663}
1664
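// Track a semaphore signal for this submission. Returns true when the semaphore has external scope, telling the
// caller to retire work early because the matching wait will never be observed on this queue.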
1665bool ValidationStateTracker::RecordSubmitSignalSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
1666 uint64_t value, uint64_t next_seq) {
1667 bool retire_early = false;
1668 auto semaphore_state = GetSemaphoreState(semaphore);
1669 if (semaphore_state) {
1670 if (semaphore_state->scope == kSyncScopeInternal) {
1671 SEMAPHORE_SIGNAL signal;
1672 signal.semaphore = semaphore;
1673 signal.seq = next_seq;
1674 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
1675 semaphore_state->signaler.first = queue;
1676 semaphore_state->signaler.second = next_seq;
1677 semaphore_state->signaled = true;
1678 } else {
1679 signal.payload = value;
1680 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001681 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001682 submission.signalSemaphores.emplace_back(std::move(signal));
1683 } else {
1684 // Retire work up until this submit early, we will not see the wait that corresponds to this signal
1685 retire_early = true;
1686 }
1687 }
1688 return retire_early;
1689}
1690
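// Record state for vkQueueSubmit: one CB_SUBMISSION per VkSubmitInfo, capturing the fence, wait/signal semaphores
// (including timeline values), and submitted command buffers.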
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    auto queue_state = GetQueueState(queue);

    uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        CB_SUBMISSION submission;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
        auto *timeline_semaphore_submit = LvlFindInChain<VkTimelineSemaphoreSubmitInfo>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            uint64_t value = 0;
            if (timeline_semaphore_submit && timeline_semaphore_submit->pWaitSemaphoreValues != nullptr &&
                (i < timeline_semaphore_submit->waitSemaphoreValueCount)) {
                value = timeline_semaphore_submit->pWaitSemaphoreValues[i];
            }
            RecordSubmitWaitSemaphore(submission, queue, submit->pWaitSemaphores[i], value, next_seq);
        }

        bool retire_early = false;
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            uint64_t value = 0;
            if (timeline_semaphore_submit && timeline_semaphore_submit->pSignalSemaphoreValues != nullptr &&
                (i < timeline_semaphore_submit->signalSemaphoreValueCount)) {
                value = timeline_semaphore_submit->pSignalSemaphoreValues[i];
            }
            retire_early |= RecordSubmitSignalSemaphore(submission, queue, submit->pSignalSemaphores[i], value, next_seq);
        }
        if (retire_early) {
            early_retire_seq = std::max(early_retire_seq, next_seq);
        }

        const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            RecordSubmitCommandBuffer(submission, submit->pCommandBuffers[i]);
        }
        submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
        queue_state->submissions.emplace_back(std::move(submission));
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(queue_state, early_retire_seq);
    }
}
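
// For reference, the VkTimelineSemaphoreSubmitInfo handling above corresponds to an application-side submit shaped
// roughly like the illustrative sketch below (hypothetical handles and values; not part of the state tracker and not
// compiled here):
//
//     VkTimelineSemaphoreSubmitInfo timeline_info = {VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO};
//     timeline_info.waitSemaphoreValueCount = 1;
//     timeline_info.pWaitSemaphoreValues = &wait_value;      // read by RecordSubmitWaitSemaphore() above
//     timeline_info.signalSemaphoreValueCount = 1;
//     timeline_info.pSignalSemaphoreValues = &signal_value;  // read by RecordSubmitSignalSemaphore() above
//
//     VkPipelineStageFlags wait_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
//     VkSubmitInfo submit_info = {VK_STRUCTURE_TYPE_SUBMIT_INFO};
//     submit_info.pNext = &timeline_info;
//     submit_info.waitSemaphoreCount = 1;
//     submit_info.pWaitSemaphores = &timeline_sem;
//     submit_info.pWaitDstStageMask = &wait_stage;
//     submit_info.signalSemaphoreCount = 1;
//     submit_info.pSignalSemaphores = &timeline_sem;
//     vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);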

void ValidationStateTracker::PostCallRecordQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    auto queue_state = GetQueueState(queue);

    uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        CB_SUBMISSION submission;
        const VkSubmitInfo2KHR *submit = &pSubmits[submit_idx];
        const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
        for (uint32_t i = 0; i < submit->waitSemaphoreInfoCount; ++i) {
            const auto &sem_info = submit->pWaitSemaphoreInfos[i];
            RecordSubmitWaitSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
        }
        bool retire_early = false;
        for (uint32_t i = 0; i < submit->signalSemaphoreInfoCount; ++i) {
            const auto &sem_info = submit->pSignalSemaphoreInfos[i];
            retire_early |= RecordSubmitSignalSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
        }
        if (retire_early) {
            early_retire_seq = std::max(early_retire_seq, next_seq);
        }
        const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferInfoCount; i++) {
            RecordSubmitCommandBuffer(submission, submit->pCommandBufferInfos[i].commandBuffer);
        }
        submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
        queue_state->submissions.emplace_back(std::move(submission));
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(queue_state, early_retire_seq);
    }
}

void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS != result) {
        return;
    }
    const auto &memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    const auto &memory_heap = phys_dev_mem_props.memoryHeaps[memory_type.heapIndex];
    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);

    layer_data::optional<DedicatedBinding> dedicated_binding;

    auto dedicated = LvlFindInChain<VkMemoryDedicatedAllocateInfo>(pAllocateInfo->pNext);
    if (dedicated) {
        if (dedicated->buffer) {
            const auto *buffer_state = GetBufferState(dedicated->buffer);
            assert(buffer_state);
            if (!buffer_state) {
                return;
            }
            dedicated_binding.emplace(dedicated->buffer, buffer_state->createInfo);
        } else if (dedicated->image) {
            const auto *image_state = GetImageState(dedicated->image);
            assert(image_state);
            if (!image_state) {
                return;
            }
            dedicated_binding.emplace(dedicated->image, image_state->createInfo);
        }
    }
    memObjMap[*pMemory] = std::make_shared<DEVICE_MEMORY_STATE>(*pMemory, pAllocateInfo, fake_address, memory_type, memory_heap,
                                                                std::move(dedicated_binding));
    return;
}
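
// The dedicated_binding bookkeeping above corresponds to an application chaining VkMemoryDedicatedAllocateInfo into
// its allocation. A minimal sketch (hypothetical handles; size and type index come from a prior requirements query;
// illustrative only):
//
//     VkMemoryDedicatedAllocateInfo dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
//     dedicated_info.image = image;  // or .buffer for a buffer-dedicated allocation
//
//     VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
//     alloc_info.pNext = &dedicated_info;
//     alloc_info.allocationSize = mem_reqs.size;
//     alloc_info.memoryTypeIndex = memory_type_index;
//     vkAllocateMemory(device, &alloc_info, nullptr, &memory);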

void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    if (!mem_info) return;
    // Any bound cmd buffers are now invalid
    mem_info->Destroy();
    fake_memory.Free(mem_info->fake_base_address);
    memObjMap.erase(mem);
}

void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    auto queue_state = GetQueueState(queue);

    uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, bindInfoCount);

    for (uint32_t bind_idx = 0; bind_idx < bindInfoCount; ++bind_idx) {
        const VkBindSparseInfo &bind_info = pBindInfo[bind_idx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bind_info.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pBufferBinds[j].pBinds[k];
                auto buffer_state = GetBufferState(bind_info.pBufferBinds[j].buffer);
                auto mem_state = GetDevMemShared(sparse_binding.memory);
                if (buffer_state && mem_state) {
                    buffer_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
                }
            }
        }
        for (uint32_t j = 0; j < bind_info.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pImageOpaqueBinds[j].pBinds[k];
                auto image_state = GetImageState(bind_info.pImageOpaqueBinds[j].image);
                auto mem_state = GetDevMemShared(sparse_binding.memory);
                if (image_state && mem_state) {
                    image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
                }
            }
        }
        for (uint32_t j = 0; j < bind_info.imageBindCount; j++) {
            for (uint32_t k = 0; k < bind_info.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bind_info.pImageBinds[j].pBinds[k];
                // TODO: This size is wrong for non-opaque bindings; it assumes 4 bytes per texel and needs to account for
                // the full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                auto image_state = GetImageState(bind_info.pImageBinds[j].image);
                auto mem_state = GetDevMemShared(sparse_binding.memory);
                if (image_state && mem_state) {
                    image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, size);
                }
            }
        }
        CB_SUBMISSION submission;
        const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
        for (uint32_t i = 0; i < bind_info.waitSemaphoreCount; ++i) {
            RecordSubmitWaitSemaphore(submission, queue, bind_info.pWaitSemaphores[i], 0, next_seq);
        }
        bool retire_early = false;
        for (uint32_t i = 0; i < bind_info.signalSemaphoreCount; ++i) {
            retire_early |= RecordSubmitSignalSemaphore(submission, queue, bind_info.pSignalSemaphores[i], 0, next_seq);
        }
        // Retire work up to this submit early; we will not see the wait that corresponds to this signal.
        if (retire_early) {
            early_retire_seq = std::max(early_retire_seq, queue_state->seq + queue_state->submissions.size() + 1);
        }

        submission.fence = bind_idx == (bindInfoCount - 1) ? fence : VK_NULL_HANDLE;
        queue_state->submissions.emplace_back(std::move(submission));
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(queue_state, early_retire_seq);
    }
}

void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    semaphoreMap[*pSemaphore] =
        std::make_shared<SEMAPHORE_STATE>(*pSemaphore, LvlFindInChain<VkSemaphoreTypeCreateInfo>(pCreateInfo->pNext));
}

void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBits handle_type,
                                                        VkSemaphoreImportFlags flags) {
    SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
    if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) &&
            sema_node->scope == kSyncScopeInternal) {
            sema_node->scope = kSyncScopeExternalTemporary;
        } else {
            sema_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo,
                                                              VkResult result) {
    auto *semaphore_state = GetSemaphoreState(pSignalInfo->semaphore);
    semaphore_state->payload = pSignalInfo->value;
}

void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range.offset = offset;
        mem_info->mapped_range.size = size;
        mem_info->p_driver_data = *ppData;
    }
}

void ValidationStateTracker::RetireFence(VkFence fence) {
    auto fence_state = GetFenceState(fence);
    if (fence_state && fence_state->scope == kSyncScopeInternal) {
        if (fence_state->signaler.first != VK_NULL_HANDLE) {
            // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
            RetireWorkOnQueue(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
        } else {
            // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
            // the fence as retired.
            fence_state->state = FENCE_RETIRED;
        }
    }
}

void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                         VkBool32 waitAll, uint64_t timeout, VkResult result) {
    if (VK_SUCCESS != result) return;

    // When we know that all fences are complete we can clean/remove their CBs
    if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
        for (uint32_t i = 0; i < fenceCount; i++) {
            RetireFence(pFences[i]);
        }
    }
    // NOTE: The alternate case, where only some of the fences have completed, is not handled here. In that case the app
    // will have to call vkGetFenceStatus() to learn which fences completed, at which point we'll clean/remove their CBs
    // if complete.
}

void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
    auto semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state) {
        for (auto &pair : queueMap) {
            QUEUE_STATE &queue_state = pair.second;
            uint64_t max_seq = 0;
            for (const auto &submission : queue_state.submissions) {
                for (const auto &signal_semaphore : submission.signalSemaphores) {
                    if (signal_semaphore.semaphore == semaphore && signal_semaphore.payload <= until_payload) {
                        if (signal_semaphore.seq > max_seq) {
                            max_seq = signal_semaphore.seq;
                        }
                    }
                }
            }
            if (max_seq) {
                RetireWorkOnQueue(&queue_state, max_seq);
            }
        }
    }
}

void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                  VkResult result) {
    if (VK_SUCCESS != result) return;

    for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
        RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
    }
}

void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                          VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}

void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
                                                             uint64_t timeout, VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}

void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;

    RetireTimelineSemaphore(semaphore, *pValue);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                                    VkResult result) {
    RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                                       VkResult result) {
    RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
}

void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
    if (VK_SUCCESS != result) return;
    RetireFence(fence);
}

void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
    queueMap.emplace(queue, QUEUE_STATE(queue, queue_family_index));
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
    if (VK_SUCCESS != result) return;
    QUEUE_STATE *queue_state = GetQueueState(queue);
    RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
}

void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (auto &queue : queueMap) {
        RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
    }
}

void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    if (!fence) return;
    auto fence_state = GetFenceState(fence);
    fence_state->Destroy();
    fenceMap.erase(fence);
}

void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!semaphore) return;
    auto semaphore_state = GetSemaphoreState(semaphore);
    semaphore_state->Destroy();
    semaphoreMap.erase(semaphore);
}

void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    if (!event) return;
    EVENT_STATE *event_state = Get<EVENT_STATE>(event);
    event_state->Destroy();
    eventMap.erase(event);
}

void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!queryPool) return;
    QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
    qp_state->Destroy();
    queryPoolMap.erase(queryPool);
}

void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        // Track objects tied to memory
        auto mem_state = GetDevMemShared(mem);
        if (mem_state) {
            buffer_state->SetMemBinding(mem_state, memoryOffset);
        }
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
                                                             const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                                const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        buffer_state->memory_requirements_checked = true;
    }
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2 *pInfo,
                                                                        VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2 *pInfo,
                                                                           VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}

void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
    const VkImagePlaneMemoryRequirementsInfo *plane_info =
        (pInfo == nullptr) ? nullptr : LvlFindInChain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
    IMAGE_STATE *image_state = GetImageState(image);
    if (image_state) {
        if (plane_info != nullptr) {
            // Multi-plane image
            if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
                image_state->memory_requirements_checked[0] = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
                image_state->memory_requirements_checked[1] = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
                image_state->memory_requirements_checked[2] = true;
            }
        } else if (!image_state->disjoint) {
            // Single plane image
            image_state->memory_requirements_checked[0] = true;
        }
    }
}
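
// The per-plane tracking above matches how an application queries requirements for a disjoint multi-planar image,
// roughly as in this illustrative sketch (hypothetical handles; one query per plane aspect):
//
//     VkImagePlaneMemoryRequirementsInfo plane_info = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
//     plane_info.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
//
//     VkImageMemoryRequirementsInfo2 info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
//     info.pNext = &plane_info;
//     info.image = image;
//
//     VkMemoryRequirements2 mem_reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
//     vkGetImageMemoryRequirements2(device, &info, &mem_reqs);  // repeat for PLANE_1_BIT / PLANE_2_BIT as needed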

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(image, nullptr);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}

static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
                                                        VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
    image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
    if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
        image_state->sparse_metadata_required = true;
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
    VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    auto image_state = GetImageState(image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!shaderModule) return;
    auto shader_module_state = GetShaderModuleState(shaderModule);
    shader_module_state->Destroy();
    shaderModuleMap.erase(shaderModule);
}

void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
                                                          const VkAllocationCallbacks *pAllocator) {
    if (!pipeline) return;
    PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
    // Any bound cmd buffers are now invalid
    pipeline_state->Destroy();
    pipelineMap.erase(pipeline);
}

void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!pipelineLayout) return;
    auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
    pipeline_layout_state->Destroy();
    pipelineLayoutMap.erase(pipelineLayout);
}

void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
                                                         const VkAllocationCallbacks *pAllocator) {
    if (!sampler) return;
    SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
    // Any bound cmd buffers are now invalid
    if (sampler_state) {
        if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
            sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
            custom_border_color_sampler_count--;
        }

        sampler_state->Destroy();
    }
    samplerMap.erase(sampler);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
                                                                     const VkAllocationCallbacks *pAllocator) {
    if (!descriptorSetLayout) return;
    auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
    if (layout_it != descriptorSetLayoutMap.end()) {
        layout_it->second.get()->Destroy();
        descriptorSetLayoutMap.erase(layout_it);
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!descriptorPool) return;
    DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
    if (desc_pool_state) {
        // Free sets that were in this pool
        for (auto *ds : desc_pool_state->sets) {
            FreeDescriptorSet(ds);
        }
        desc_pool_state->Destroy();
        descriptorPoolMap.erase(descriptorPool);
    }
}

// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        // Allow any derived class to clean up command buffer state
        if (command_buffer_reset_callback) {
            (*command_buffer_reset_callback)(command_buffers[i]);
        }
        if (command_buffer_free_callback) {
            (*command_buffer_free_callback)(command_buffers[i]);
        }

        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
            // Remove CBState from CB map
            cb_state->Destroy();
            commandBufferMap.erase(cb_state->commandBuffer());
        }
    }
}

void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
                                                             uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
    auto pool = GetCommandPoolState(commandPool);
    FreeCommandBufferStates(pool, commandBufferCount, pCommandBuffers);
}

void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    auto queue_flags = GetPhysicalDeviceState()->queue_family_properties[pCreateInfo->queueFamilyIndex].queueFlags;
    commandPoolMap[*pCommandPool] = std::make_shared<COMMAND_POOL_STATE>(*pCommandPool, pCreateInfo, queue_flags);
}

void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;

    uint32_t index_count = 0, n_perf_pass = 0;
    bool has_cb = false, has_rb = false;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        const auto *perf = LvlFindInChain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
        index_count = perf->counterIndexCount;

        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
            const auto &counter = counters.counters[perf->pCounterIndices[i]];
            switch (counter.scope) {
                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
                    has_cb = true;
                    break;
                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
                    has_rb = true;
                    break;
                default:
                    break;
            }
        }

        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf, &n_perf_pass);
    }

    queryPoolMap[*pQueryPool] =
        std::make_shared<QUERY_POOL_STATE>(*pQueryPool, pCreateInfo, index_count, n_perf_pass, has_cb, has_rb);

    QueryObject query_obj{*pQueryPool, 0u};
    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
        query_obj.query = i;
        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
    }
}
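
// The perf-counter bookkeeping above corresponds to an application creating a VK_KHR_performance_query pool, roughly as
// in this illustrative sketch (hypothetical queue family and counter indices; not part of the state tracker):
//
//     VkQueryPoolPerformanceCreateInfoKHR perf_info = {VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR};
//     perf_info.queueFamilyIndex = queue_family_index;
//     perf_info.counterIndexCount = counter_count;
//     perf_info.pCounterIndices = counter_indices;
//
//     VkQueryPoolCreateInfo pool_info = {VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO};
//     pool_info.pNext = &perf_info;
//     pool_info.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
//     pool_info.queryCount = 1;
//     vkCreateQueryPool(device, &pool_info, nullptr, &query_pool);
//
//     uint32_t num_passes = 0;  // the same value n_perf_pass records above
//     vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device, &perf_info, &num_passes);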

void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!commandPool) return;
    COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
    // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
    // "When a pool is destroyed, all command buffers allocated from the pool are freed."
    if (cp_state) {
        // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
        std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
        FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
        cp_state->Destroy();
        commandPoolMap.erase(commandPool);
    }
}

void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
                                                            VkCommandPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    // Reset all of the CBs allocated from this pool
    auto command_pool_state = GetCommandPoolState(commandPool);
    for (auto cmd_buffer : command_pool_state->commandBuffers) {
        ResetCommandBufferState(cmd_buffer);
    }
}

void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                       VkResult result) {
    for (uint32_t i = 0; i < fenceCount; ++i) {
        auto fence_state = GetFenceState(pFences[i]);
        if (fence_state) {
            if (fence_state->scope == kSyncScopeInternal) {
                fence_state->state = FENCE_UNSIGNALED;
            } else if (fence_state->scope == kSyncScopeExternalTemporary) {
                fence_state->scope = kSyncScopeInternal;
            }
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!framebuffer) return;
    FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
    framebuffer_state->Destroy();
    frameBufferMap.erase(framebuffer);
}

void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!renderPass) return;
    RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
    rp_state->Destroy();
    renderPassMap.erase(renderPass);
}

void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
    if (VK_SUCCESS != result) return;
    fenceMap[*pFence] = std::make_shared<FENCE_STATE>(*pFence, pCreateInfo);
}

bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                    const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                    void *cgpl_state_data) const {
    // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    cgpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    cgpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
        (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}
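
// Note: pipeline creation uses the chassis' two-phase pattern. PreCallValidateCreate*Pipelines builds the PIPELINE_STATE
// objects in the per-call *_api_state scratch structure (so CoreChecks and GPU validation can see, and even rewrite, the
// create infos), and the matching PostCallRecordCreate*Pipelines below moves those objects into pipelineMap only for the
// handles the driver actually returned.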

void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   VkResult result, void *cgpl_state_data) {
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (cgpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
            pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
        }
    }
    cgpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkComputePipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   void *ccpl_state_data) const {
    auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
    ccpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    ccpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
        ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                  const VkComputePipelineCreateInfo *pCreateInfos,
                                                                  const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                  VkResult result, void *ccpl_state_data) {
    create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);

    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (ccpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
            pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
        }
    }
    ccpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
                                                                        uint32_t count,
                                                                        const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
                                                                         VkPipelineCache pipelineCache, uint32_t count,
                                                                         const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
                                                                        VkPipelineCache pipelineCache, uint32_t count,
                                                                        const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkPipeline *pPipelines, VkResult result,
                                                                        void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
                                                         const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
                                                         VkResult result) {
    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
    if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
        pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
        custom_border_color_sampler_count++;
    }
}

void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
                                                                     const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkDescriptorSetLayout *pSetLayout, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
}

// For repeatable sorting, not very useful for "memory in range" search
struct PushConstantRangeCompare {
    bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
        if (lhs->offset == rhs->offset) {
            if (lhs->size == rhs->size) {
                // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
                return lhs->stageFlags < rhs->stageFlags;
            }
            // If the offsets are the same then sorting by the end of range is useful for validation
            return lhs->size < rhs->size;
        }
        return lhs->offset < rhs->offset;
    }
};

static PushConstantRangesDict push_constant_ranges_dict;

PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
    if (!info->pPushConstantRanges) {
        // Hand back the empty entry (creating as needed)...
        return push_constant_ranges_dict.look_up(PushConstantRanges());
    }

    // Sort the input ranges to ensure equivalent ranges map to the same id
    std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
    for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
        sorted.insert(info->pPushConstantRanges + i);
    }

    PushConstantRanges ranges;
    ranges.reserve(sorted.size());
    for (const auto *range : sorted) {
        ranges.emplace_back(*range);
    }
    return push_constant_ranges_dict.look_up(std::move(ranges));
}
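
// These canonical-form dictionaries (push_constant_ranges_dict above, plus the set-layout and "compatible for set"
// dictionaries below) deduplicate equivalent definitions into shared IDs so compatibility checks reduce to cheap ID
// comparisons. As an illustrative sketch (hypothetical create infos; relies on the dictionary handing back the same
// canonical entry for equal definitions), two layouts declaring the same push-constant ranges in a different order
// resolve to the same PushConstantRangesId:
//
//     PushConstantRangesId id_a = GetCanonicalId(&create_info_with_ranges_AB);
//     PushConstantRangesId id_b = GetCanonicalId(&create_info_with_ranges_BA);
//     assert(id_a == id_b);  // same canonical entry after sorting in GetCanonicalId()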

// Dictionary of canonical forms of a pipeline layout's list of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;

static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}

void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkPipelineLayout *pPipelineLayout, VkResult result) {
    if (VK_SUCCESS != result) return;

    auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>(*pPipelineLayout);
    pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
    PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
        set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
    }

    // Get canonical form IDs for the "compatible for set" contents
    pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
    auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
    pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);

    // Create table of "compatible for set N" canonical forms for trivial accept validation
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->compat_for_set.emplace_back(
            GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
    }
    pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
}
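
// Note: compat_for_set[i] above captures "pipeline layout compatibility for set i" as defined by the Vulkan spec: two
// layouts are compatible for set N when they were created with identical push constant ranges and identically defined
// descriptor set layouts for sets 0..N. Because each piece is reduced to a canonical ID first, that check becomes a
// single ID comparison at bind/validate time.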
2629
2630void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2631 const VkAllocationCallbacks *pAllocator,
2632 VkDescriptorPool *pDescriptorPool, VkResult result) {
2633 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002634 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002635}
2636
2637void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2638 VkDescriptorPoolResetFlags flags, VkResult result) {
2639 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002640 DESCRIPTOR_POOL_STATE *pool = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002641 // TODO: validate flags
2642 // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
John Zulauf79f06582021-02-27 18:38:39 -07002643 for (auto *ds : pool->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002644 FreeDescriptorSet(ds);
2645 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002646 pool->sets.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06002647 // Reset available count for each type and available sets for this pool
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002648 for (auto it = pool->availableDescriptorTypeCount.begin(); it != pool->availableDescriptorTypeCount.end(); ++it) {
2649 pool->availableDescriptorTypeCount[it->first] = pool->maxDescriptorTypeCount[it->first];
locke-lunargd556cc32019-09-17 01:21:23 -06002650 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002651 pool->availableSets = pool->maxSets;
locke-lunargd556cc32019-09-17 01:21:23 -06002652}
2653
2654bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2655 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002656 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002657 // Always update common data
2658 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2659 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2660 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2661
2662 return false;
2663}
2664
2665// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
2666void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2667 VkDescriptorSet *pDescriptorSets, VkResult result,
2668 void *ads_state_data) {
2669 if (VK_SUCCESS != result) return;
2670 // All the updates are contained in a single cvdescriptorset function
2671 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2672 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2673 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2674}
2675
2676void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2677 const VkDescriptorSet *pDescriptorSets) {
2678 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2679 // Update available descriptor sets in pool
2680 pool_state->availableSets += count;
2681
2682 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
2683 for (uint32_t i = 0; i < count; ++i) {
2684 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2685 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2686 uint32_t type_index = 0, descriptor_count = 0;
2687 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2688 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2689 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2690 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2691 }
2692 FreeDescriptorSet(descriptor_set);
2693 pool_state->sets.erase(descriptor_set);
2694 }
2695 }
2696}
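// Worked example (illustrative only): freeing one descriptor set whose layout has a single binding of
// 3 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER descriptors adds 1 back to availableSets (count == 1) and adds 3
// back to availableDescriptorTypeCount[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER], mirroring the counts that
// were consumed when the set was allocated from the pool.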
2697
2698void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2699 const VkWriteDescriptorSet *pDescriptorWrites,
2700 uint32_t descriptorCopyCount,
2701 const VkCopyDescriptorSet *pDescriptorCopies) {
2702 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2703 pDescriptorCopies);
2704}
2705
2706void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2707 VkCommandBuffer *pCommandBuffer, VkResult result) {
2708 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002709 auto pool = GetCommandPoolShared(pCreateInfo->commandPool);
2710 if (pool) {
locke-lunargd556cc32019-09-17 01:21:23 -06002711 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2712 // Add command buffer to its commandPool map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002713 pool->commandBuffers.insert(pCommandBuffer[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002714 auto cb_state = std::make_shared<CMD_BUFFER_STATE>(pCommandBuffer[i], pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002715 cb_state->command_pool = pool;
2716 cb_state->unprotected = pool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06002717 // Add command buffer to map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002718 commandBufferMap[pCommandBuffer[i]] = std::move(cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002719 ResetCommandBufferState(pCommandBuffer[i]);
2720 }
2721 }
2722}
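// Hedged example of the application-side call that reaches this hook (hypothetical handles):
//     VkCommandBufferAllocateInfo alloc_info = {};
//     alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
//     alloc_info.commandPool = pool;
//     alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
//     alloc_info.commandBufferCount = 2;
//     VkCommandBuffer cbs[2];
//     vkAllocateCommandBuffers(device, &alloc_info, cbs);
// Each returned handle gets a CMD_BUFFER_STATE entry in commandBufferMap tied to its command pool above.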
2723
locke-lunargfc78e932020-11-19 17:06:24 -07002724void UpdateSubpassAttachments(const safe_VkSubpassDescription2 &subpass, std::vector<SUBPASS_INFO> &subpasses) {
2725 for (uint32_t index = 0; index < subpass.inputAttachmentCount; ++index) {
2726 const uint32_t attachment_index = subpass.pInputAttachments[index].attachment;
2727 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2728 subpasses[attachment_index].used = true;
2729 subpasses[attachment_index].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
2730 subpasses[attachment_index].layout = subpass.pInputAttachments[index].layout;
2731 }
2732 }
2733
2734 for (uint32_t index = 0; index < subpass.colorAttachmentCount; ++index) {
2735 const uint32_t attachment_index = subpass.pColorAttachments[index].attachment;
2736 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2737 subpasses[attachment_index].used = true;
2738 subpasses[attachment_index].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2739 subpasses[attachment_index].layout = subpass.pColorAttachments[index].layout;
2740 }
2741 if (subpass.pResolveAttachments) {
2742 const uint32_t attachment_index2 = subpass.pResolveAttachments[index].attachment;
2743 if (attachment_index2 != VK_ATTACHMENT_UNUSED) {
2744 subpasses[attachment_index2].used = true;
2745 subpasses[attachment_index2].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2746 subpasses[attachment_index2].layout = subpass.pResolveAttachments[index].layout;
2747 }
2748 }
2749 }
2750
2751 if (subpass.pDepthStencilAttachment) {
2752 const uint32_t attachment_index = subpass.pDepthStencilAttachment->attachment;
2753 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2754 subpasses[attachment_index].used = true;
2755 subpasses[attachment_index].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
2756 subpasses[attachment_index].layout = subpass.pDepthStencilAttachment->layout;
2757 }
2758 }
2759}
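// Illustrative sketch: for a subpass whose pColorAttachments[0].attachment == 2, the loop above records
// subpasses[2] = {used = true, usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, layout =
// pColorAttachments[0].layout}. Note that the vector is indexed by framebuffer attachment index, not by
// the attachment's position within the subpass description.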
2760
2761void UpdateAttachmentsView(ValidationStateTracker &tracker, CMD_BUFFER_STATE &cb_state, const FRAMEBUFFER_STATE &framebuffer,
2762 const VkRenderPassBeginInfo *pRenderPassBegin) {
2763 auto &attachments = *(cb_state.active_attachments.get());
2764    const bool imageless = (framebuffer.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) != 0;
2765 const VkRenderPassAttachmentBeginInfo *attachment_info_struct = nullptr;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002766 if (pRenderPassBegin) attachment_info_struct = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
locke-lunargfc78e932020-11-19 17:06:24 -07002767
2768 for (uint32_t i = 0; i < attachments.size(); ++i) {
2769 if (imageless) {
2770 if (attachment_info_struct && i < attachment_info_struct->attachmentCount) {
2771 auto res = cb_state.attachments_view_states.insert(
2772 tracker.GetShared<IMAGE_VIEW_STATE>(attachment_info_struct->pAttachments[i]));
2773 attachments[i] = res.first->get();
2774 }
2775 } else {
2776 auto res = cb_state.attachments_view_states.insert(framebuffer.attachments_view_state[i]);
2777 attachments[i] = res.first->get();
2778 }
2779 }
2780}
2781
locke-lunargd556cc32019-09-17 01:21:23 -06002782void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
2783 const VkCommandBufferBeginInfo *pBeginInfo) {
2784 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2785 if (!cb_state) return;
locke-lunargfc78e932020-11-19 17:06:24 -07002786
locke-lunargd556cc32019-09-17 01:21:23 -06002787 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
2788 ResetCommandBufferState(commandBuffer);
2789 }
2790 // Set updated state here in case implicit reset occurs above
2791 cb_state->state = CB_RECORDING;
2792 cb_state->beginInfo = *pBeginInfo;
Tony-LunarG3c287f62020-12-17 12:39:49 -07002793 if (cb_state->beginInfo.pInheritanceInfo && (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
locke-lunargd556cc32019-09-17 01:21:23 -06002794 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
2795 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
2796        // If we are a secondary command buffer that is inheriting render pass state, update the items we should inherit.
2797 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
2798 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
locke-lunargaecf2152020-05-12 17:15:41 -06002799 cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
locke-lunargd556cc32019-09-17 01:21:23 -06002800 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
locke-lunargfc78e932020-11-19 17:06:24 -07002801
locke-lunargaecf2152020-05-12 17:15:41 -06002802 if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
2803 cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
locke-lunargfc78e932020-11-19 17:06:24 -07002804 cb_state->active_subpasses = nullptr;
2805 cb_state->active_attachments = nullptr;
2806
2807 if (cb_state->activeFramebuffer) {
2808 cb_state->framebuffers.insert(cb_state->activeFramebuffer);
2809
2810 // Set cb_state->active_subpasses
2811 cb_state->active_subpasses =
2812 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
2813 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
2814 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
2815
2816 // Set cb_state->active_attachments & cb_state->attachments_view_states
2817 cb_state->active_attachments =
2818 std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
2819 UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, nullptr);
2820
2821 // Connect this framebuffer and its children to this cmdBuffer
Jeremy Gebben5570abe2021-05-16 18:35:13 -06002822 if (!disabled[command_buffer_state]) {
2823 cb_state->AddChild(cb_state->activeFramebuffer.get());
2824 }
locke-lunargfc78e932020-11-19 17:06:24 -07002825 }
locke-lunargaecf2152020-05-12 17:15:41 -06002826 }
David Zhao Akeley44139b12021-04-26 16:16:13 -07002827
2828 // Check for VkCommandBufferInheritanceViewportScissorInfoNV (VK_NV_inherited_viewport_scissor)
2829 auto p_inherited_viewport_scissor_info =
2830 LvlFindInChain<VkCommandBufferInheritanceViewportScissorInfoNV>(cb_state->beginInfo.pInheritanceInfo->pNext);
2831 if (p_inherited_viewport_scissor_info != nullptr && p_inherited_viewport_scissor_info->viewportScissor2D) {
2832 auto pViewportDepths = p_inherited_viewport_scissor_info->pViewportDepths;
2833 cb_state->inheritedViewportDepths.assign(
2834 pViewportDepths, pViewportDepths + p_inherited_viewport_scissor_info->viewportDepthCount);
2835 }
locke-lunargd556cc32019-09-17 01:21:23 -06002836 }
2837 }
2838
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002839 auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002840 if (chained_device_group_struct) {
2841 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
2842 } else {
2843 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
2844 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002845
2846 cb_state->performance_lock_acquired = performance_lock_acquired;
locke-lunargd556cc32019-09-17 01:21:23 -06002847}
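// Hedged example of a secondary command buffer begin that exercises the inheritance path above
// (hypothetical handles; VK_NV_inherited_viewport_scissor chaining omitted):
//     VkCommandBufferInheritanceInfo inherit = {};
//     inherit.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
//     inherit.renderPass = render_pass;
//     inherit.subpass = 0;
//     inherit.framebuffer = framebuffer;  // may be VK_NULL_HANDLE
//     VkCommandBufferBeginInfo begin_info = {};
//     begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
//     begin_info.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
//     begin_info.pInheritanceInfo = &inherit;
//     vkBeginCommandBuffer(secondary_cb, &begin_info);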
2848
2849void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2850 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2851 if (!cb_state) return;
2852    // Cached validation results are tied to this particular recording of this command buffer.
John Zulauf79f06582021-02-27 18:38:39 -07002853 for (auto *descriptor_set : cb_state->validated_descriptor_sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002854 descriptor_set->ClearCachedValidation(cb_state);
2855 }
2856 cb_state->validated_descriptor_sets.clear();
2857 if (VK_SUCCESS == result) {
2858 cb_state->state = CB_RECORDED;
2859 }
2860}
2861
2862void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
2863 VkResult result) {
2864 if (VK_SUCCESS == result) {
2865 ResetCommandBufferState(commandBuffer);
2866 }
2867}
2868
2869CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
2870 // initially assume everything is static state
2871 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
2872
2873 if (ds) {
2874 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
locke-lunarg4189aa22020-10-21 00:23:48 -06002875 flags &= ~ConvertToCBStatusFlagBits(ds->pDynamicStates[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002876 }
2877 }
locke-lunargd556cc32019-09-17 01:21:23 -06002878 return flags;
2879}
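// Illustrative sketch (assuming ConvertToCBStatusFlagBits maps VK_DYNAMIC_STATE_VIEWPORT to
// CBSTATUS_VIEWPORT_SET and VK_DYNAMIC_STATE_SCISSOR to CBSTATUS_SCISSOR_SET): a pipeline whose
// pDynamicStates lists only those two states returns CBSTATUS_ALL_STATE_SET with the viewport and
// scissor bits cleared, i.e. every other piece of state is treated as baked into the pipeline.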
2880
2881// Validation cache:
2882// CV is the bottommost implementor of this extension. Don't pass calls down.
2883// Utility function to set collective state for the pipeline
2884void SetPipelineState(PIPELINE_STATE *pPipe) {
2885 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
2886 if (pPipe->graphicsPipelineCI.pColorBlendState) {
2887 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
2888 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
2889 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2890 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2891 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2892 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2893 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2894 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2895 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2896 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
2897 pPipe->blendConstantsEnabled = true;
2898 }
2899 }
2900 }
2901 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07002902 // Check if sample location is enabled
2903 if (pPipe->graphicsPipelineCI.pMultisampleState) {
2904 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002905 LvlFindInChain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
sfricke-samsung8f658d42020-05-03 20:12:24 -07002906 if (sample_location_state != nullptr) {
2907 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
2908 }
2909 }
locke-lunargd556cc32019-09-17 01:21:23 -06002910}
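// Illustrative sketch: a pipeline with one color blend attachment where blendEnable == VK_TRUE and
// dstColorBlendFactor == VK_BLEND_FACTOR_CONSTANT_COLOR falls into the blend-factor range checked above,
// so blendConstantsEnabled becomes true (other checks may then use this to require blend constants to be
// set before a draw).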
2911
2912void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
2913 VkPipeline pipeline) {
2914 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2915 assert(cb_state);
2916
2917 auto pipe_state = GetPipelineState(pipeline);
2918 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002919 bool rasterization_enabled = VK_FALSE == pipe_state->graphicsPipelineCI.ptr()->pRasterizationState->rasterizerDiscardEnable;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002920 const auto* viewport_state = pipe_state->graphicsPipelineCI.ptr()->pViewportState;
2921 const auto* dynamic_state = pipe_state->graphicsPipelineCI.ptr()->pDynamicState;
locke-lunargd556cc32019-09-17 01:21:23 -06002922 cb_state->status &= ~cb_state->static_status;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002923 cb_state->static_status = MakeStaticStateMask(dynamic_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002924 cb_state->status |= cb_state->static_status;
locke-lunarg4189aa22020-10-21 00:23:48 -06002925 cb_state->dynamic_status = CBSTATUS_ALL_STATE_SET & (~cb_state->static_status);
David Zhao Akeley44139b12021-04-26 16:16:13 -07002926
2927 // Used to calculate CMD_BUFFER_STATE::usedViewportScissorCount upon draw command with this graphics pipeline.
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002928        // If rasterization is disabled (no viewports/scissors are used), or the actual number of viewports/scissors is dynamic (unknown at
 2929        // this time), then these are set to 0 to disable this check.
David Zhao Akeley44139b12021-04-26 16:16:13 -07002930 auto has_dynamic_viewport_count = cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002931 auto has_dynamic_scissor_count = cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002932 cb_state->pipelineStaticViewportCount =
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002933 has_dynamic_viewport_count || !rasterization_enabled ? 0 : viewport_state->viewportCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002934 cb_state->pipelineStaticScissorCount =
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002935 has_dynamic_scissor_count || !rasterization_enabled ? 0 : viewport_state->scissorCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002936
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002937 // Trash dynamic viewport/scissor state if pipeline defines static state and enabled rasterization.
David Zhao Akeley44139b12021-04-26 16:16:13 -07002938 // akeley98 NOTE: There's a bit of an ambiguity in the spec, whether binding such a pipeline overwrites
2939 // the entire viewport (scissor) array, or only the subsection defined by the viewport (scissor) count.
2940 // I am taking the latter interpretation based on the implementation details of NVIDIA's Vulkan driver.
David Zhao Akeley44139b12021-04-26 16:16:13 -07002941 if (!has_dynamic_viewport_count) {
2942 cb_state->trashedViewportCount = true;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002943 if (rasterization_enabled && (cb_state->static_status & CBSTATUS_VIEWPORT_SET)) {
David Zhao Akeley44139b12021-04-26 16:16:13 -07002944 cb_state->trashedViewportMask |= (uint32_t(1) << viewport_state->viewportCount) - 1u;
2945 // should become = ~uint32_t(0) if the other interpretation is correct.
2946 }
2947 }
2948 if (!has_dynamic_scissor_count) {
2949 cb_state->trashedScissorCount = true;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002950 if (rasterization_enabled && (cb_state->static_status & CBSTATUS_SCISSOR_SET)) {
David Zhao Akeley44139b12021-04-26 16:16:13 -07002951 cb_state->trashedScissorMask |= (uint32_t(1) << viewport_state->scissorCount) - 1u;
2952 // should become = ~uint32_t(0) if the other interpretation is correct.
2953 }
2954 }
locke-lunargd556cc32019-09-17 01:21:23 -06002955 }
locke-lunargb8d7a7a2020-10-25 16:01:52 -06002956 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
2957 cb_state->lastBound[lv_bind_point].pipeline_state = pipe_state;
locke-lunargd556cc32019-09-17 01:21:23 -06002958 SetPipelineState(pipe_state);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002959 if (!disabled[command_buffer_state]) {
2960 cb_state->AddChild(pipe_state);
2961 }
locke-lunargb8be8222020-10-20 00:34:37 -06002962 for (auto &slot : pipe_state->active_slots) {
2963 for (auto &req : slot.second) {
2964 for (auto &sampler : req.second.samplers_used_by_image) {
2965 for (auto &des : sampler) {
2966 des.second = nullptr;
2967 }
2968 }
2969 }
2970 }
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06002971 cb_state->lastBound[lv_bind_point].UpdateSamplerDescriptorsUsedByImage();
locke-lunargd556cc32019-09-17 01:21:23 -06002972}
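// Worked example of the trashing logic above (illustrative only): binding a graphics pipeline with
// rasterization enabled, no dynamic viewport count, viewports declared static (CBSTATUS_VIEWPORT_SET in
// static_status), and pViewportState->viewportCount == 2 sets pipelineStaticViewportCount = 2, marks
// trashedViewportCount, and ORs 0b11 into trashedViewportMask (only the first two viewports, per the
// interpretation noted in the comment above).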
2973
2974void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2975 uint32_t viewportCount, const VkViewport *pViewports) {
2976 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07002977 uint32_t bits = ((1u << viewportCount) - 1u) << firstViewport;
2978 cb_state->viewportMask |= bits;
2979 cb_state->trashedViewportMask &= ~bits;
locke-lunargd556cc32019-09-17 01:21:23 -06002980 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06002981 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002982
2983 cb_state->dynamicViewports.resize(std::max(size_t(firstViewport + viewportCount), cb_state->dynamicViewports.size()));
2984 for (size_t i = 0; i < viewportCount; ++i) {
2985 cb_state->dynamicViewports[firstViewport + i] = pViewports[i];
2986 }
locke-lunargd556cc32019-09-17 01:21:23 -06002987}
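// Worked example: vkCmdSetViewport(cb, /*firstViewport*/1, /*viewportCount*/2, views) computes
// bits = ((1u << 2) - 1u) << 1 = 0b110, so viewports 1 and 2 are marked valid in viewportMask,
// cleared from trashedViewportMask, and dynamicViewports is grown to hold at least 3 entries.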
2988
2989void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
2990 uint32_t exclusiveScissorCount,
2991 const VkRect2D *pExclusiveScissors) {
2992 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2993 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
2994 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
2995 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06002996 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06002997}
2998
2999void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3000 VkImageLayout imageLayout) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003001 if (disabled[command_buffer_state]) return;
3002
locke-lunargd556cc32019-09-17 01:21:23 -06003003 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3004
3005 if (imageView != VK_NULL_HANDLE) {
3006 auto view_state = GetImageViewState(imageView);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003007 cb_state->AddChild(view_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003008 }
3009}
3010
3011void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3012 uint32_t viewportCount,
3013 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3014 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3015 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3016 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3017 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003018 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003019}
3020
3021void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3022 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3023 const VkAllocationCallbacks *pAllocator,
3024 VkAccelerationStructureNV *pAccelerationStructure,
3025 VkResult result) {
3026 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003027 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003028
3029 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003030 auto as_memory_requirements_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003031 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003032 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003033 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3034
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003035 auto scratch_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003036 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003037 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003038 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3039 &as_state->build_scratch_memory_requirements);
3040
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003041 auto update_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003042 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003043 update_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003044 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3045 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003046 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003047 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3048}
3049
Jeff Bolz95176d02020-04-01 00:36:16 -05003050void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3051 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3052 const VkAllocationCallbacks *pAllocator,
3053 VkAccelerationStructureKHR *pAccelerationStructure,
3054 VkResult result) {
3055 if (VK_SUCCESS != result) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003056 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE_KHR>(*pAccelerationStructure, pCreateInfo);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003057 as_state->allocator = pAllocator;
sourav parmarcd5fb182020-07-17 12:58:44 -07003058 accelerationStructureMap_khr[*pAccelerationStructure] = std::move(as_state);
Jeff Bolz95176d02020-04-01 00:36:16 -05003059}
3060
sourav parmarcd5fb182020-07-17 12:58:44 -07003061void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresKHR(
3062 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3063 const VkAccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos) {
3064 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3065 if (cb_state == nullptr) {
3066 return;
3067 }
3068 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003069 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003070 if (dst_as_state != nullptr) {
3071 dst_as_state->built = true;
3072 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003073 if (!disabled[command_buffer_state]) {
3074 cb_state->AddChild(dst_as_state);
3075 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003076 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003077 if (!disabled[command_buffer_state]) {
3078 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3079 if (src_as_state != nullptr) {
3080 cb_state->AddChild(src_as_state);
3081 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003082 }
3083 }
3084 cb_state->hasBuildAccelerationStructureCmd = true;
3085}
3086
3087void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresIndirectKHR(
3088 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3089 const VkDeviceAddress *pIndirectDeviceAddresses, const uint32_t *pIndirectStrides,
3090 const uint32_t *const *ppMaxPrimitiveCounts) {
3091 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3092 if (cb_state == nullptr) {
3093 return;
3094 }
3095 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003096 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003097 if (dst_as_state != nullptr) {
3098 dst_as_state->built = true;
3099 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003100 if (!disabled[command_buffer_state]) {
3101 cb_state->AddChild(dst_as_state);
3102 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003103 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003104 if (!disabled[command_buffer_state]) {
3105 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3106 if (src_as_state != nullptr) {
3107 cb_state->AddChild(src_as_state);
3108 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003109 }
3110 }
3111 cb_state->hasBuildAccelerationStructureCmd = true;
3112}
locke-lunargd556cc32019-09-17 01:21:23 -06003113void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
Mike Schuchardt2df08912020-12-15 16:28:09 -08003114 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2 *pMemoryRequirements) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003115 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(pInfo->accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003116 if (as_state != nullptr) {
3117 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3118 as_state->memory_requirements = *pMemoryRequirements;
3119 as_state->memory_requirements_checked = true;
3120 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3121 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3122 as_state->build_scratch_memory_requirements_checked = true;
3123 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3124 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3125 as_state->update_scratch_memory_requirements_checked = true;
3126 }
3127 }
3128}
3129
sourav parmarcd5fb182020-07-17 12:58:44 -07003130void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3131 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06003132 if (VK_SUCCESS != result) return;
3133 for (uint32_t i = 0; i < bindInfoCount; i++) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003134 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003135
sourav parmarcd5fb182020-07-17 12:58:44 -07003136 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(info.accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003137 if (as_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06003138 // Track objects tied to memory
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003139 auto mem_state = GetDevMemShared(info.memory);
3140 if (mem_state) {
3141 as_state->SetMemBinding(mem_state, info.memoryOffset);
3142 }
locke-lunargd556cc32019-09-17 01:21:23 -06003143
3144 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003145 // XXX TODO: Query device address for KHR extension
sourav parmarcd5fb182020-07-17 12:58:44 -07003146 if (enabled[gpu_validation]) {
locke-lunargd556cc32019-09-17 01:21:23 -06003147 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3148 }
3149 }
3150 }
3151}
3152
3153void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3154 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3155 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3156 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3157 if (cb_state == nullptr) {
3158 return;
3159 }
3160
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003161 auto *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003162 if (dst_as_state != nullptr) {
3163 dst_as_state->built = true;
3164 dst_as_state->build_info.initialize(pInfo);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003165 if (!disabled[command_buffer_state]) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003166 cb_state->AddChild(dst_as_state);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003167 }
locke-lunargd556cc32019-09-17 01:21:23 -06003168 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003169 if (!disabled[command_buffer_state]) {
3170 auto *src_as_state = GetAccelerationStructureStateNV(src);
3171 if (src_as_state != nullptr) {
3172 cb_state->AddChild(src_as_state);
3173 }
locke-lunargd556cc32019-09-17 01:21:23 -06003174 }
3175 cb_state->hasBuildAccelerationStructureCmd = true;
3176}
3177
3178void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3179 VkAccelerationStructureNV dst,
3180 VkAccelerationStructureNV src,
3181 VkCopyAccelerationStructureModeNV mode) {
3182 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3183 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003184 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
3185 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003186 if (dst_as_state != nullptr && src_as_state != nullptr) {
3187 dst_as_state->built = true;
3188 dst_as_state->build_info = src_as_state->build_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003189 if (!disabled[command_buffer_state]) {
3190 cb_state->AddChild(dst_as_state);
3191 cb_state->AddChild(src_as_state);
3192 }
locke-lunargd556cc32019-09-17 01:21:23 -06003193 }
3194 }
3195}
3196
Jeff Bolz95176d02020-04-01 00:36:16 -05003197void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3198 VkAccelerationStructureKHR accelerationStructure,
3199 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003200 if (!accelerationStructure) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003201 auto *as_state = GetAccelerationStructureStateKHR(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003202 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003203 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003204 accelerationStructureMap_khr.erase(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003205 }
3206}
3207
Jeff Bolz95176d02020-04-01 00:36:16 -05003208void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3209 VkAccelerationStructureNV accelerationStructure,
3210 const VkAllocationCallbacks *pAllocator) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003211 if (!accelerationStructure) return;
3212 auto *as_state = GetAccelerationStructureStateNV(accelerationStructure);
3213 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003214 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003215 accelerationStructureMap.erase(accelerationStructure);
3216 }
Jeff Bolz95176d02020-04-01 00:36:16 -05003217}
3218
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003219void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3220 uint32_t viewportCount,
3221 const VkViewportWScalingNV *pViewportWScalings) {
3222 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3223 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003224 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003225}
3226
locke-lunargd556cc32019-09-17 01:21:23 -06003227void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3228 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3229 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003230 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003231}
3232
3233void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3234 uint16_t lineStipplePattern) {
3235 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3236 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003237 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003238}
3239
3240void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3241 float depthBiasClamp, float depthBiasSlopeFactor) {
3242 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3243 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003244 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003245}
3246
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003247void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3248 const VkRect2D *pScissors) {
3249 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003250 uint32_t bits = ((1u << scissorCount) - 1u) << firstScissor;
3251 cb_state->scissorMask |= bits;
3252 cb_state->trashedScissorMask &= ~bits;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003253 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003254 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003255}
3256
locke-lunargd556cc32019-09-17 01:21:23 -06003257void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3258 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3259 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003260 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003261}
3262
3263void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3264 float maxDepthBounds) {
3265 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3266 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003267 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003268}
3269
3270void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3271 uint32_t compareMask) {
3272 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3273 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003274 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003275}
3276
3277void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3278 uint32_t writeMask) {
3279 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3280 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003281 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003282}
3283
3284void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3285 uint32_t reference) {
3286 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3287 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003288 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003289}
3290
3291// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
3292// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
3293// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
3294void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
3295 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
3296 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
3297 cvdescriptorset::DescriptorSet *push_descriptor_set,
3298 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
3299 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
3300 // Defensive
3301 assert(pipeline_layout);
3302 if (!pipeline_layout) return;
3303
3304 uint32_t required_size = first_set + set_count;
3305 const uint32_t last_binding_index = required_size - 1;
3306 assert(last_binding_index < pipeline_layout->compat_for_set.size());
3307
3308 // Some useful shorthand
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003309 const auto lv_bind_point = ConvertToLvlBindPoint(pipeline_bind_point);
3310 auto &last_bound = cb_state->lastBound[lv_bind_point];
locke-lunargd556cc32019-09-17 01:21:23 -06003311 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
3312 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
3313
3314 // We need this three times in this function, but nowhere else
3315 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
3316 if (ds && ds->IsPushDescriptor()) {
3317 assert(ds == last_bound.push_descriptor_set.get());
3318 last_bound.push_descriptor_set = nullptr;
3319 return true;
3320 }
3321 return false;
3322 };
3323
3324    // Clean up the bindings "disturbed" before and after the range being set
3325 if (required_size < current_size) {
3326 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
3327            // We're disturbing those after last; we'll shrink below, but first need to check for and clean up the push_descriptor
3328 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
3329 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
3330 }
3331 } else {
3332 // We're not disturbing past last, so leave the upper binding data alone.
3333 required_size = current_size;
3334 }
3335 }
3336
3337 // We resize if we need more set entries or if those past "last" are disturbed
3338 if (required_size != current_size) {
3339 last_bound.per_set.resize(required_size);
3340 }
3341
3342 // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
3343 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
3344 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
3345 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3346 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
3347 last_bound.per_set[set_idx].dynamicOffsets.clear();
3348 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
3349 }
3350 }
3351
3352 // Now update the bound sets with the input sets
3353 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
3354 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
3355 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
3356 cvdescriptorset::DescriptorSet *descriptor_set =
3357 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
3358
3359 // Record binding (or push)
3360 if (descriptor_set != last_bound.push_descriptor_set.get()) {
3361 // Only cleanup the push descriptors if they aren't the currently used set.
3362 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3363 }
3364 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
3365 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
3366
3367 if (descriptor_set) {
3368 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
3369 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
3370 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
3371 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
3372 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
3373 input_dynamic_offsets = end_offset;
3374 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
3375 } else {
3376 last_bound.per_set[set_idx].dynamicOffsets.clear();
3377 }
3378 if (!descriptor_set->IsPushDescriptor()) {
3379 // Can't cache validation of push_descriptors
3380 cb_state->validated_descriptor_sets.insert(descriptor_set);
3381 }
3382 }
3383 }
3384}
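// Illustrative sketch of the compatibility handling above (hypothetical layouts and sets): if sets 0..2
// are bound with layout L1 and the application then binds only set 0 with a layout L2 whose
// compat_for_set[0] differs from L1's, required_size (1) is below current_size (3) and the compat ids
// mismatch, so per_set shrinks to a single entry and sets 1..2 are no longer considered bound, matching
// the spec's "Pipeline Layout Compatibility" invalidation behavior.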
3385
3386// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3387void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3388 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3389 uint32_t firstSet, uint32_t setCount,
3390 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3391 const uint32_t *pDynamicOffsets) {
3392 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3393 auto pipeline_layout = GetPipelineLayout(layout);
3394
3395 // Resize binding arrays
3396 uint32_t last_set_index = firstSet + setCount - 1;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003397 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3398 if (last_set_index >= cb_state->lastBound[lv_bind_point].per_set.size()) {
3399 cb_state->lastBound[lv_bind_point].per_set.resize(last_set_index + 1);
locke-lunargd556cc32019-09-17 01:21:23 -06003400 }
3401
3402 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3403 dynamicOffsetCount, pDynamicOffsets);
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003404 cb_state->lastBound[lv_bind_point].pipeline_layout = layout;
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06003405 cb_state->lastBound[lv_bind_point].UpdateSamplerDescriptorsUsedByImage();
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003406}
3407
locke-lunargd556cc32019-09-17 01:21:23 -06003408void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
3409 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
3410 const VkWriteDescriptorSet *pDescriptorWrites) {
3411 const auto &pipeline_layout = GetPipelineLayout(layout);
3412 // Short circuit invalid updates
3413 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003414 !pipeline_layout->set_layouts[set]->IsPushDescriptor()) {
locke-lunargd556cc32019-09-17 01:21:23 -06003415 return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003416 }
locke-lunargd556cc32019-09-17 01:21:23 -06003417
3418 // We need a descriptor set to update the bindings with, compatible with the passed layout
Jeremy Gebben50fb1832021-03-19 09:10:13 -06003419 const auto& dsl = pipeline_layout->set_layouts[set];
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003420 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3421 auto &last_bound = cb_state->lastBound[lv_bind_point];
locke-lunargd556cc32019-09-17 01:21:23 -06003422 auto &push_descriptor_set = last_bound.push_descriptor_set;
3423    // If we are disturbing the current push_descriptor_set, clear it
3424 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003425 last_bound.UnbindAndResetPushDescriptorSet(cb_state, new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
locke-lunargd556cc32019-09-17 01:21:23 -06003426 }
3427
3428 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
3429 nullptr);
3430 last_bound.pipeline_layout = layout;
3431
3432 // Now that we have either the new or extant push_descriptor set ... do the write updates against it
Jeff Bolz41a1ced2019-10-11 11:40:49 -05003433 push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
locke-lunargd556cc32019-09-17 01:21:23 -06003434}
3435
3436void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3437 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3438 uint32_t set, uint32_t descriptorWriteCount,
3439 const VkWriteDescriptorSet *pDescriptorWrites) {
3440 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3441 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3442}
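// Hedged example of the application-side push that reaches this hook (hypothetical handles; dstSet is
// ignored for push descriptors):
//     VkDescriptorBufferInfo buffer_info = {uniform_buffer, 0, VK_WHOLE_SIZE};
//     VkWriteDescriptorSet write = {};
//     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
//     write.dstBinding = 0;
//     write.descriptorCount = 1;
//     write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//     write.pBufferInfo = &buffer_info;
//     vkCmdPushDescriptorSetKHR(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout, /*set*/0, 1, &write);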
3443
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003444void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3445 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3446 const void *pValues) {
3447 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3448 if (cb_state != nullptr) {
3449 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3450
3451 auto &push_constant_data = cb_state->push_constant_data;
3452 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3453 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06003454 cb_state->push_constant_pipeline_layout_set = layout;
3455
3456 auto flags = stageFlags;
3457 uint32_t bit_shift = 0;
3458 while (flags) {
3459 if (flags & 1) {
3460 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
3461 const auto it = cb_state->push_constant_data_update.find(flag);
3462
3463 if (it != cb_state->push_constant_data_update.end()) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06003464 std::memset(it->second.data() + offset, PC_Byte_Updated, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06003465 }
3466 }
3467 flags = flags >> 1;
3468 ++bit_shift;
3469 }
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003470 }
3471}
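// Worked example of the stage loop above: with stageFlags = VK_SHADER_STAGE_VERTEX_BIT |
// VK_SHADER_STAGE_FRAGMENT_BIT (0x1 | 0x10), the loop visits bit_shift 0 and 4, marking bytes
// [offset, offset + size) as PC_Byte_Updated in push_constant_data_update for each of the two stages
// that has an entry in that map.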
3472
locke-lunargd556cc32019-09-17 01:21:23 -06003473void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3474 VkIndexType indexType) {
locke-lunargd556cc32019-09-17 01:21:23 -06003475 auto cb_state = GetCBState(commandBuffer);
3476
3477 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
Piers Daniell39842ee2020-07-10 16:42:33 -06003478 cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
locke-lunarg1ae57d62020-11-18 10:49:19 -07003479 cb_state->index_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(buffer);
3480 cb_state->index_buffer_binding.size = cb_state->index_buffer_binding.buffer_state->createInfo.size;
locke-lunargd556cc32019-09-17 01:21:23 -06003481 cb_state->index_buffer_binding.offset = offset;
3482 cb_state->index_buffer_binding.index_type = indexType;
3483    // Add binding for this index buffer to this command buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003484 if (!disabled[command_buffer_state]) {
3485 cb_state->AddChild(cb_state->index_buffer_binding.buffer_state.get());
3486 }
locke-lunargd556cc32019-09-17 01:21:23 -06003487}
3488
3489void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3490 uint32_t bindingCount, const VkBuffer *pBuffers,
3491 const VkDeviceSize *pOffsets) {
3492 auto cb_state = GetCBState(commandBuffer);
3493
3494 uint32_t end = firstBinding + bindingCount;
3495 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3496 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3497 }
3498
3499 for (uint32_t i = 0; i < bindingCount; ++i) {
3500 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07003501 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003502 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06003503 vertex_buffer_binding.size = VK_WHOLE_SIZE;
3504 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06003505        // Add binding for this vertex buffer to this command buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003506 if (pBuffers[i] && !disabled[command_buffer_state]) {
3507 cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
Jeff Bolz165818a2020-05-08 11:19:03 -05003508 }
locke-lunargd556cc32019-09-17 01:21:23 -06003509 }
3510}
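// Hedged example of the application-side call recorded above (hypothetical handles):
//     VkBuffer buffers[2] = {position_buffer, instance_buffer};
//     VkDeviceSize offsets[2] = {0, 256};
//     vkCmdBindVertexBuffers(cmd, /*firstBinding*/0, /*bindingCount*/2, buffers, offsets);
// Bindings 0 and 1 then carry the corresponding BUFFER_STATE, the given offsets, size VK_WHOLE_SIZE, and
// stride 0 until overwritten (e.g. by vkCmdBindVertexBuffers2EXT, which can supply real strides).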
3511
3512void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3513 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003514 if (disabled[command_buffer_state]) return;
3515
locke-lunargd556cc32019-09-17 01:21:23 -06003516 auto cb_state = GetCBState(commandBuffer);
3517 auto dst_buffer_state = GetBufferState(dstBuffer);
3518
3519 // Update bindings between buffer and cmd buffer
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003520 if (cb_state && dst_buffer_state) {
3521 cb_state->AddChild(dst_buffer_state);
3522 }
locke-lunargd556cc32019-09-17 01:21:23 -06003523}
3524
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06003525static bool SetEventStageMask(VkEvent event, VkPipelineStageFlags2KHR stageMask,
Jeff Bolz310775c2019-10-09 00:46:33 -05003526 EventToStageMap *localEventToStageMap) {
3527 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06003528 return false;
3529}
3530
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003531void ValidationStateTracker::RecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06003532 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003533 if (!disabled[command_buffer_state]) {
3534 auto event_state = GetEventState(event);
3535 if (event_state) {
3536 cb_state->AddChild(event_state);
3537 }
locke-lunargd556cc32019-09-17 01:21:23 -06003538 }
3539 cb_state->events.push_back(event);
3540 if (!cb_state->waitedEvents.count(event)) {
3541 cb_state->writeEventsBeforeWait.push_back(event);
3542 }
Jeff Bolz310775c2019-10-09 00:46:33 -05003543 cb_state->eventUpdates.emplace_back(
3544 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
3545 return SetEventStageMask(event, stageMask, localEventToStageMap);
3546 });
locke-lunargd556cc32019-09-17 01:21:23 -06003547}
3548
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003549void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3550 VkPipelineStageFlags stageMask) {
3551 RecordCmdSetEvent(commandBuffer, event, stageMask);
3552}
3553
3554void ValidationStateTracker::PreCallRecordCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
3555 const VkDependencyInfoKHR *pDependencyInfo) {
3556 auto stage_masks = sync_utils::GetGlobalStageMasks(*pDependencyInfo);
3557
3558 RecordCmdSetEvent(commandBuffer, event, stage_masks.src);
Jeremy Gebben79649152021-06-22 14:46:24 -06003559
3560 RecordBarriers(commandBuffer, pDependencyInfo);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003561}
3562
3563void ValidationStateTracker::RecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3564 VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06003565 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003566 if (!disabled[command_buffer_state]) {
3567 auto event_state = GetEventState(event);
3568 if (event_state) {
3569 cb_state->AddChild(event_state);
3570 }
locke-lunargd556cc32019-09-17 01:21:23 -06003571 }
3572 cb_state->events.push_back(event);
3573 if (!cb_state->waitedEvents.count(event)) {
3574 cb_state->writeEventsBeforeWait.push_back(event);
3575 }
3576
3577 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003578 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003579 return SetEventStageMask(event, VkPipelineStageFlags2KHR(0), localEventToStageMap);
Jeff Bolz310775c2019-10-09 00:46:33 -05003580 });
locke-lunargd556cc32019-09-17 01:21:23 -06003581}
3582
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003583void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3584 VkPipelineStageFlags stageMask) {
3585 RecordCmdResetEvent(commandBuffer, event, stageMask);
3586}
3587
3588void ValidationStateTracker::PreCallRecordCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
3589 VkPipelineStageFlags2KHR stageMask) {
3590 RecordCmdResetEvent(commandBuffer, event, stageMask);
3591}
3592
3593void ValidationStateTracker::RecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents) {
locke-lunargd556cc32019-09-17 01:21:23 -06003594 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3595 for (uint32_t i = 0; i < eventCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003596 if (!disabled[command_buffer_state]) {
3597 auto event_state = GetEventState(pEvents[i]);
3598 if (event_state) {
3599 cb_state->AddChild(event_state);
3600 }
locke-lunargd556cc32019-09-17 01:21:23 -06003601 }
3602 cb_state->waitedEvents.insert(pEvents[i]);
3603 cb_state->events.push_back(pEvents[i]);
3604 }
3605}
3606
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003607void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
3608 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
3609 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3610 uint32_t bufferMemoryBarrierCount,
3611 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3612 uint32_t imageMemoryBarrierCount,
3613 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3614 RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
Jeremy Gebben79649152021-06-22 14:46:24 -06003615 RecordBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers,
3616 imageMemoryBarrierCount, pImageMemoryBarriers);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003617}
3618
3619void ValidationStateTracker::PreCallRecordCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount,
3620 const VkEvent *pEvents, const VkDependencyInfoKHR *pDependencyInfos) {
3621 RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
Jeremy Gebben79649152021-06-22 14:46:24 -06003622 for (uint32_t i = 0; i < eventCount; i++) {
3623 RecordBarriers(commandBuffer, &pDependencyInfos[i]);
3624 }
3625}
3626
3627void ValidationStateTracker::PostCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
3628 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
3629 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3630 uint32_t bufferMemoryBarrierCount,
3631 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3632 uint32_t imageMemoryBarrierCount,
3633 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3634 RecordBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers,
3635 imageMemoryBarrierCount, pImageMemoryBarriers);
3636}
3637
3638void ValidationStateTracker::PreCallRecordCmdPipelineBarrier2KHR(VkCommandBuffer commandBuffer,
3639 const VkDependencyInfoKHR *pDependencyInfo) {
3640 RecordBarriers(commandBuffer, pDependencyInfo);
3641}
3642
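// The RecordBarriers overloads below only update object lifetime tracking: any buffer or image referenced by a
// barrier is added as a child of the command buffer state.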
3643void ValidationStateTracker::RecordBarriers(VkCommandBuffer commandBuffer, uint32_t memoryBarrierCount,
3644 const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
3645 const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
3646 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3647 if (disabled[command_buffer_state]) return;
3648
3649 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3650 for (uint32_t i = 0; i < bufferMemoryBarrierCount; i++) {
3651 auto buffer_state = GetBufferState(pBufferMemoryBarriers[i].buffer);
3652 if (buffer_state) {
3653 cb_state->AddChild(buffer_state);
3654 }
3655 }
3656 for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
3657 auto image_state = GetImageState(pImageMemoryBarriers[i].image);
3658 if (image_state) {
3659 cb_state->AddChild(image_state);
3660 }
3661 }
3662}
3663
3664void ValidationStateTracker::RecordBarriers(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR *pDependencyInfo) {
3665 if (disabled[command_buffer_state]) return;
3666
3667 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3668 for (uint32_t i = 0; i < pDependencyInfo->bufferMemoryBarrierCount; i++) {
3669 auto buffer_state = GetBufferState(pDependencyInfo->pBufferMemoryBarriers[i].buffer);
3670 if (buffer_state) {
3671 cb_state->AddChild(buffer_state);
3672 }
3673 }
3674 for (uint32_t i = 0; i < pDependencyInfo->imageMemoryBarrierCount; i++) {
3675 auto image_state = GetImageState(pDependencyInfo->pImageMemoryBarriers[i].image);
3676 if (image_state) {
3677 cb_state->AddChild(image_state);
3678 }
3679 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003680}
3681
Jeff Bolz310775c2019-10-09 00:46:33 -05003682bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
3683 (*localQueryToStateMap)[object] = value;
3684 return false;
3685}
3686
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003687bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
3688 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05003689 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003690 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05003691 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06003692 }
3693 return false;
3694}
3695
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003696QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
3697 uint32_t perfPass) const {
3698 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06003699
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003700 auto iter = localQueryToStateMap->find(query);
3701 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05003702
Jeff Bolz310775c2019-10-09 00:46:33 -05003703 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06003704}
3705
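// Query state transitions are not applied immediately; the lambdas recorded in queryUpdates are replayed at
// queue submit time against a per-submission query-to-state map.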
3706void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003707 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003708 cb_state->activeQueries.insert(query_obj);
3709 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003710 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3711 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3712 QueryMap *localQueryToStateMap) {
3713 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
3714 return false;
3715 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003716 if (!disabled[command_buffer_state]) {
3717 auto pool_state = GetQueryPoolState(query_obj.pool);
3718 cb_state->AddChild(pool_state);
3719 }
locke-lunargd556cc32019-09-17 01:21:23 -06003720}
3721
3722void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3723 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003724 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003725 QueryObject query = {queryPool, slot};
3726 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3727 RecordCmdBeginQuery(cb_state, query);
3728}
3729
3730void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003731 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003732 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003733 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3734 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3735 QueryMap *localQueryToStateMap) {
3736 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3737 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003738 if (!disabled[command_buffer_state]) {
3739 auto pool_state = GetQueryPoolState(query_obj.pool);
3740 cb_state->AddChild(pool_state);
3741 }
locke-lunargd556cc32019-09-17 01:21:23 -06003742}
3743
3744void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003745 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003746 QueryObject query_obj = {queryPool, slot};
3747 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3748 RecordCmdEndQuery(cb_state, query_obj);
3749}
3750
3751void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3752 uint32_t firstQuery, uint32_t queryCount) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003753 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003754 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3755
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02003756 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
3757 QueryObject query = {queryPool, slot};
3758 cb_state->resetQueries.insert(query);
3759 }
3760
Jeff Bolz310775c2019-10-09 00:46:33 -05003761 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003762 bool do_validate, VkQueryPool &firstPerfQueryPool,
3763 uint32_t perfQueryPass,
3764 QueryMap *localQueryToStateMap) {
3765 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06003766 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003767 if (!disabled[command_buffer_state]) {
3768 auto pool_state = GetQueryPoolState(queryPool);
3769 cb_state->AddChild(pool_state);
3770 }
locke-lunargd556cc32019-09-17 01:21:23 -06003771}
3772
3773void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3774 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3775 VkDeviceSize dstOffset, VkDeviceSize stride,
3776 VkQueryResultFlags flags) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003777 if (disabled[query_validation] || disabled[command_buffer_state]) return;
3778
locke-lunargd556cc32019-09-17 01:21:23 -06003779 auto cb_state = GetCBState(commandBuffer);
3780 auto dst_buff_state = GetBufferState(dstBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003781 cb_state->AddChild(dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003782 auto pool_state = GetQueryPoolState(queryPool);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003783 cb_state->AddChild(pool_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003784}
3785
3786void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3787 VkQueryPool queryPool, uint32_t slot) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003788 PostCallRecordCmdWriteTimestamp2KHR(commandBuffer, pipelineStage, queryPool, slot);
3789}
3790
3791void ValidationStateTracker::PostCallRecordCmdWriteTimestamp2KHR(VkCommandBuffer commandBuffer,
3792 VkPipelineStageFlags2KHR pipelineStage, VkQueryPool queryPool,
3793 uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003794 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003795 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003796 if (!disabled[command_buffer_state]) {
3797 auto pool_state = GetQueryPoolState(queryPool);
3798 cb_state->AddChild(pool_state);
3799 }
locke-lunargd556cc32019-09-17 01:21:23 -06003800 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003801 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
3802 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3803 QueryMap *localQueryToStateMap) {
3804 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3805 });
locke-lunargd556cc32019-09-17 01:21:23 -06003806}
3807
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003808void ValidationStateTracker::PostCallRecordCmdWriteAccelerationStructuresPropertiesKHR(
3809 VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR *pAccelerationStructures,
3810 VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {
3811 if (disabled[query_validation]) return;
3812 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003813 if (!disabled[command_buffer_state]) {
3814 auto pool_state = GetQueryPoolState(queryPool);
3815 cb_state->AddChild(pool_state);
3816 }
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003817 cb_state->queryUpdates.emplace_back(
3818 [queryPool, firstQuery, accelerationStructureCount](const ValidationStateTracker *device_data, bool do_validate,
3819 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3820 QueryMap *localQueryToStateMap) {
3821 return SetQueryStateMulti(queryPool, firstQuery, accelerationStructureCount, perfQueryPass, QUERYSTATE_ENDED,
3822 localQueryToStateMap);
3823 });
3824}
3825
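// Imageless framebuffers supply their attachments at vkCmdBeginRenderPass time, so image view state is only
// captured here for conventional framebuffers.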
locke-lunargd556cc32019-09-17 01:21:23 -06003826void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3827 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3828 VkResult result) {
3829 if (VK_SUCCESS != result) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003830
Jeremy Gebben88f58142021-06-01 10:07:52 -06003831 std::vector<std::shared_ptr<IMAGE_VIEW_STATE>> views;
Mike Schuchardt2df08912020-12-15 16:28:09 -08003832 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) == 0) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003833 views.resize(pCreateInfo->attachmentCount);
locke-lunarg1ae57d62020-11-18 10:49:19 -07003834
locke-lunargd556cc32019-09-17 01:21:23 -06003835 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003836 views[i] = GetShared<IMAGE_VIEW_STATE>(pCreateInfo->pAttachments[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003837 }
3838 }
Jeremy Gebben88f58142021-06-01 10:07:52 -06003839
3840 frameBufferMap[*pFramebuffer] = std::make_shared<FRAMEBUFFER_STATE>(
3841 *pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass), std::move(views));
locke-lunargd556cc32019-09-17 01:21:23 -06003842}
3843
locke-lunargd556cc32019-09-17 01:21:23 -06003844void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
3845 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3846 VkResult result) {
3847 if (VK_SUCCESS != result) return;
Jeremy Gebben88f58142021-06-01 10:07:52 -06003848 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003849}
3850
Mike Schuchardt2df08912020-12-15 16:28:09 -08003851void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07003852 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3853 VkResult result) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003854 if (VK_SUCCESS != result) return;
3855
3856 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
Tony-LunarG977448c2019-12-02 14:52:02 -07003857}
3858
Mike Schuchardt2df08912020-12-15 16:28:09 -08003859void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07003860 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3861 VkResult result) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003862 if (VK_SUCCESS != result) return;
3863
3864 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
Tony-LunarG977448c2019-12-02 14:52:02 -07003865}
3866
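// Capture everything needed while a render pass is active: the render pass and framebuffer objects, the current
// subpass index, and per-attachment subpass usage and image view state.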
locke-lunargd556cc32019-09-17 01:21:23 -06003867void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
3868 const VkRenderPassBeginInfo *pRenderPassBegin,
3869 const VkSubpassContents contents) {
3870 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunargaecf2152020-05-12 17:15:41 -06003871 auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
3872 auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06003873
3874 if (render_pass_state) {
locke-lunargaecf2152020-05-12 17:15:41 -06003875 cb_state->activeFramebuffer = framebuffer;
locke-lunargd556cc32019-09-17 01:21:23 -06003876 cb_state->activeRenderPass = render_pass_state;
Tony-LunarG61e7c0c2020-03-03 16:09:11 -07003877 cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
locke-lunargd556cc32019-09-17 01:21:23 -06003878 cb_state->activeSubpass = 0;
3879 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07003880
locke-lunargd556cc32019-09-17 01:21:23 -06003881 // Connect this RP to cmdBuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003882 if (!disabled[command_buffer_state]) {
3883 cb_state->AddChild(render_pass_state.get());
3884 }
locke-lunargd556cc32019-09-17 01:21:23 -06003885
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07003886 auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06003887 if (chained_device_group_struct) {
3888 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
3889 } else {
3890 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
3891 }
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003892
locke-lunargfc78e932020-11-19 17:06:24 -07003893 cb_state->active_subpasses = nullptr;
3894 cb_state->active_attachments = nullptr;
3895
3896 if (framebuffer) {
3897 cb_state->framebuffers.insert(framebuffer);
3898
3899 // Set cb_state->active_subpasses
3900 cb_state->active_subpasses =
3901 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
3902 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
3903 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
3904
3905 // Set cb_state->active_attachments & cb_state->attachments_view_states
3906 cb_state->active_attachments =
3907 std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(framebuffer->createInfo.attachmentCount);
3908 UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, pRenderPassBegin);
3909
3910 // Connect this framebuffer and its children to this cmdBuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003911 cb_state->AddChild(framebuffer.get());
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003912 }
locke-lunargd556cc32019-09-17 01:21:23 -06003913 }
3914}
3915
3916void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
3917 const VkRenderPassBeginInfo *pRenderPassBegin,
3918 VkSubpassContents contents) {
3919 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
3920}
3921
3922void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
3923 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003924 const VkSubpassBeginInfo *pSubpassBeginInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06003925 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
3926}
3927
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06003928void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
3929 uint32_t counterBufferCount,
3930 const VkBuffer *pCounterBuffers,
3931 const VkDeviceSize *pCounterBufferOffsets) {
3932 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3933
3934 cb_state->transform_feedback_active = true;
3935}
3936
3937void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
3938 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
3939 const VkDeviceSize *pCounterBufferOffsets) {
3940 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3941
3942 cb_state->transform_feedback_active = false;
3943}
3944
Tony-LunarG977448c2019-12-02 14:52:02 -07003945void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
3946 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003947 const VkSubpassBeginInfo *pSubpassBeginInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07003948 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
3949}
3950
locke-lunargd556cc32019-09-17 01:21:23 -06003951void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3952 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3953 cb_state->activeSubpass++;
3954 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07003955
3956 // Update cb_state->active_subpasses
3957 if (cb_state->activeRenderPass && cb_state->activeFramebuffer) {
3958 cb_state->active_subpasses = nullptr;
3959 cb_state->active_subpasses =
3960 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
3961
3962 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
3963 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
3964 }
locke-lunargd556cc32019-09-17 01:21:23 -06003965}
3966
3967void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3968 RecordCmdNextSubpass(commandBuffer, contents);
3969}
3970
3971void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003972 const VkSubpassBeginInfo *pSubpassBeginInfo,
3973 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06003974 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
3975}
3976
Tony-LunarG977448c2019-12-02 14:52:02 -07003977void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003978 const VkSubpassBeginInfo *pSubpassBeginInfo,
3979 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07003980 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
3981}
3982
locke-lunargd556cc32019-09-17 01:21:23 -06003983void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
3984 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3985 cb_state->activeRenderPass = nullptr;
locke-lunargfc78e932020-11-19 17:06:24 -07003986 cb_state->active_attachments = nullptr;
3987 cb_state->active_subpasses = nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06003988 cb_state->activeSubpass = 0;
3989    cb_state->activeFramebuffer = nullptr;
3990}
3991
3992void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
3993 RecordCmdEndRenderPassState(commandBuffer);
3994}
3995
3996void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003997 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06003998 RecordCmdEndRenderPassState(commandBuffer);
3999}
4000
Tony-LunarG977448c2019-12-02 14:52:02 -07004001void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004002 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004003 RecordCmdEndRenderPassState(commandBuffer);
4004}
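
// Fold the state recorded in each secondary command buffer back into the primary: image layout maps, deferred
// query and queue-submit lambdas, and linked-command-buffer bookkeeping. Dynamic viewport/scissor state is
// treated as trashed afterwards.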
locke-lunargd556cc32019-09-17 01:21:23 -06004005void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
4006 const VkCommandBuffer *pCommandBuffers) {
4007 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4008
4009    CMD_BUFFER_STATE *sub_cb_state = nullptr;
4010 for (uint32_t i = 0; i < commandBuffersCount; i++) {
4011 sub_cb_state = GetCBState(pCommandBuffers[i]);
4012 assert(sub_cb_state);
4013 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
4014 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
4015 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
4016 // from the validation step to the recording step
4017 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
4018 }
4019 }
4020
4021        // Propagate initial layout and current layout state to the primary cmd buffer
4022 // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
4023        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
4024 // for those other classes.
4025 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
4026 const auto image = sub_layout_map_entry.first;
4027 const auto *image_state = GetImageState(image);
4028 if (!image_state) continue; // Can't set layouts of a dead image
4029
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06004030 auto *cb_subres_map = cb_state->GetImageSubresourceLayoutMap(*image_state);
John Zulauf17708d02021-02-22 11:20:58 -07004031 const auto *sub_cb_subres_map = &sub_layout_map_entry.second;
locke-lunargd556cc32019-09-17 01:21:23 -06004032 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
4033 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
4034 }
4035
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06004036 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer();
locke-lunargd556cc32019-09-17 01:21:23 -06004037 cb_state->linkedCommandBuffers.insert(sub_cb_state);
Jeremy Gebben5570abe2021-05-16 18:35:13 -06004038 cb_state->AddChild(sub_cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004039 for (auto &function : sub_cb_state->queryUpdates) {
4040 cb_state->queryUpdates.push_back(function);
4041 }
4042 for (auto &function : sub_cb_state->queue_submit_functions) {
4043 cb_state->queue_submit_functions.push_back(function);
4044 }
David Zhao Akeley44139b12021-04-26 16:16:13 -07004045
4046 // State is trashed after executing secondary command buffers.
4047 // Importantly, this function runs after CoreChecks::PreCallValidateCmdExecuteCommands.
4048 cb_state->trashedViewportMask = ~uint32_t(0);
4049 cb_state->trashedScissorMask = ~uint32_t(0);
4050 cb_state->trashedViewportCount = true;
4051 cb_state->trashedScissorCount = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004052 }
4053}
4054
4055void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
4056 VkFlags flags, void **ppData, VkResult result) {
4057 if (VK_SUCCESS != result) return;
4058 RecordMappedMemory(mem, offset, size, ppData);
4059}
4060
4061void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4062 auto mem_info = GetDevMemState(mem);
4063 if (mem_info) {
4064 mem_info->mapped_range = MemRange();
4065 mem_info->p_driver_data = nullptr;
4066 }
4067}
4068
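// Swapchain-backed images have no VkDeviceMemory of their own, so they share a fake allocation address with every
// other image bound to the same swapchain slot. Other images are bound to their backing memory and, when created
// with VK_IMAGE_CREATE_ALIAS_BIT, linked to the other images bound to that memory.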
4069void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
4070 IMAGE_STATE *image_state = GetImageState(bindInfo.image);
4071 if (image_state) {
locke-lunargae26eac2020-04-16 15:29:05 -06004072        // An Android special image cannot get VkSubresourceLayout until the image is bound to memory.
4073 // See: VUID-vkGetImageSubresourceLayout-image-01895
4074 image_state->fragment_encoder =
4075 std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07004076 const auto swapchain_info = LvlFindInChain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06004077 if (swapchain_info) {
John Zulauf29d00532021-03-04 13:28:54 -07004078 auto *swapchain = GetSwapchainState(swapchain_info->swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004079 if (swapchain) {
John Zulaufd13b38e2021-03-05 08:17:38 -07004080 SWAPCHAIN_IMAGE &swap_image = swapchain->images[swapchain_info->imageIndex];
John Zulauf29d00532021-03-04 13:28:54 -07004081 if (swap_image.bound_images.empty()) {
4082 // If this is the first "binding" of an image to this swapchain index, get a fake allocation
4083 image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
4084 } else {
4085 image_state->swapchain_fake_address = (*swap_image.bound_images.cbegin())->swapchain_fake_address;
4086 }
John Zulaufd13b38e2021-03-05 08:17:38 -07004087 swap_image.bound_images.emplace(image_state);
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004088 image_state->AddParent(swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004089 image_state->bind_swapchain = swapchain_info->swapchain;
4090 image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
John Zulaufd13b38e2021-03-05 08:17:38 -07004091
John Zulauf29d00532021-03-04 13:28:54 -07004092 // All images bound to this swapchain and index are aliases
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06004093 for (auto *other_image : swap_image.bound_images) {
4094 image_state->AddAliasingImage(other_image);
4095 }
locke-lunargd556cc32019-09-17 01:21:23 -06004096 }
4097 } else {
4098 // Track bound memory range information
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004099 auto mem_info = GetDevMemShared(bindInfo.memory);
locke-lunargd556cc32019-09-17 01:21:23 -06004100 if (mem_info) {
John Zulaufd13b38e2021-03-05 08:17:38 -07004101 if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06004102 for (auto *base_node : mem_info->ObjectBindings()) {
4103 if (base_node->Handle().type == kVulkanObjectTypeImage) {
4104 auto other_image = static_cast<IMAGE_STATE *>(base_node);
4105 image_state->AddAliasingImage(other_image);
4106 }
4107 }
John Zulaufd13b38e2021-03-05 08:17:38 -07004108 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004109 // Track objects tied to memory
4110 image_state->SetMemBinding(mem_info, bindInfo.memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06004111 }
locke-lunargd556cc32019-09-17 01:21:23 -06004112 }
locke-lunargd556cc32019-09-17 01:21:23 -06004113 }
4114}
4115
4116void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
4117 VkDeviceSize memoryOffset, VkResult result) {
4118 if (VK_SUCCESS != result) return;
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004119 auto bind_info = LvlInitStruct<VkBindImageMemoryInfo>();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004120 bind_info.image = image;
4121 bind_info.memory = mem;
4122 bind_info.memoryOffset = memoryOffset;
4123 UpdateBindImageMemoryState(bind_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004124}
4125
4126void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004127 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004128 if (VK_SUCCESS != result) return;
4129 for (uint32_t i = 0; i < bindInfoCount; i++) {
4130 UpdateBindImageMemoryState(pBindInfos[i]);
4131 }
4132}
4133
4134void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004135 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004136 if (VK_SUCCESS != result) return;
4137 for (uint32_t i = 0; i < bindInfoCount; i++) {
4138 UpdateBindImageMemoryState(pBindInfos[i]);
4139 }
4140}
4141
4142void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
4143 auto event_state = GetEventState(event);
4144 if (event_state) {
4145 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
4146 }
locke-lunargd556cc32019-09-17 01:21:23 -06004147}
4148
4149void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
4150 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
4151 VkResult result) {
4152 if (VK_SUCCESS != result) return;
4153 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
4154 pImportSemaphoreFdInfo->flags);
4155}
4156
4157void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004158 VkExternalSemaphoreHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004159 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004160 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004161 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
4162 semaphore_state->scope = kSyncScopeExternalPermanent;
4163 }
4164}
4165
4166#ifdef VK_USE_PLATFORM_WIN32_KHR
4167void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
4168 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
4169 if (VK_SUCCESS != result) return;
4170 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
4171 pImportSemaphoreWin32HandleInfo->flags);
4172}
4173
4174void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
4175 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4176 HANDLE *pHandle, VkResult result) {
4177 if (VK_SUCCESS != result) return;
4178 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
4179}
4180
4181void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
4182 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
4183 if (VK_SUCCESS != result) return;
4184 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
4185 pImportFenceWin32HandleInfo->flags);
4186}
4187
4188void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
4189 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4190 HANDLE *pHandle, VkResult result) {
4191 if (VK_SUCCESS != result) return;
4192 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
4193}
4194#endif
4195
4196void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
4197 VkResult result) {
4198 if (VK_SUCCESS != result) return;
4199 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
4200}
4201
Mike Schuchardt2df08912020-12-15 16:28:09 -08004202void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type,
4203 VkFenceImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06004204 FENCE_STATE *fence_node = GetFenceState(fence);
4205 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004206 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_FENCE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06004207 fence_node->scope == kSyncScopeInternal) {
4208 fence_node->scope = kSyncScopeExternalTemporary;
4209 } else {
4210 fence_node->scope = kSyncScopeExternalPermanent;
4211 }
4212 }
4213}
4214
4215void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
4216 VkResult result) {
4217 if (VK_SUCCESS != result) return;
4218 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
4219}
4220
Mike Schuchardt2df08912020-12-15 16:28:09 -08004221void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004222 FENCE_STATE *fence_state = GetFenceState(fence);
4223 if (fence_state) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004224 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004225 // Export with reference transference becomes external
4226 fence_state->scope = kSyncScopeExternalPermanent;
4227 } else if (fence_state->scope == kSyncScopeInternal) {
4228 // Export with copy transference has a side effect of resetting the fence
4229 fence_state->state = FENCE_UNSIGNALED;
4230 }
4231 }
4232}
4233
4234void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
4235 VkResult result) {
4236 if (VK_SUCCESS != result) return;
4237 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
4238}
4239
4240void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
4241 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
4242 if (VK_SUCCESS != result) return;
John Zulaufd5115702021-01-18 12:34:33 -07004243 const auto event = *pEvent;
Jeremy Gebbencbf22862021-03-03 12:01:22 -07004244 eventMap.emplace(event, std::make_shared<EVENT_STATE>(event, pCreateInfo->flags));
locke-lunargd556cc32019-09-17 01:21:23 -06004245}
4246
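// Shared helper for vkCreateSwapchainKHR and vkCreateSharedSwapchainsKHR: links the new swapchain to its surface
// and retires oldSwapchain whether or not creation succeeded.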
4247void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
4248 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
4249 SWAPCHAIN_NODE *old_swapchain_state) {
4250 if (VK_SUCCESS == result) {
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004251 if (surface_state->swapchain) {
4252 surface_state->swapchain->RemoveParent(surface_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004253 }
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004254 surface_state->swapchain = CreateSwapchainState(pCreateInfo, *pSwapchain);
4255 surface_state->swapchain->AddParent(surface_state);
4256 swapchainMap[*pSwapchain] = surface_state->swapchain;
locke-lunargd556cc32019-09-17 01:21:23 -06004257 } else {
4258 surface_state->swapchain = nullptr;
4259 }
4260 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
4261 if (old_swapchain_state) {
4262 old_swapchain_state->retired = true;
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004263 old_swapchain_state->RemoveParent(surface_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004264 }
4265 return;
4266}
4267
4268void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
4269 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
4270 VkResult result) {
4271 auto surface_state = GetSurfaceState(pCreateInfo->surface);
4272 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
4273 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
4274}
4275
4276void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
4277 const VkAllocationCallbacks *pAllocator) {
4278 if (!swapchain) return;
4279 auto swapchain_data = GetSwapchainState(swapchain);
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004280 if (!swapchain_data) return;
John Zulauffaa7a522021-03-05 12:22:45 -07004281
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004282 for (auto &swapchain_image : swapchain_data->images) {
4283 for (auto *image : swapchain_image.bound_images) {
4284 imageMap.erase(image->image());
locke-lunargd556cc32019-09-17 01:21:23 -06004285 }
locke-lunargd556cc32019-09-17 01:21:23 -06004286 }
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004287
4288 swapchain_data->Destroy();
4289 swapchainMap.erase(swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004290}
4291
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004292void ValidationStateTracker::PostCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
4293 const VkDisplayModeCreateInfoKHR *pCreateInfo,
4294 const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode,
4295 VkResult result) {
4296 if (VK_SUCCESS != result) return;
4297 if (!pMode) return;
Jeremy Gebben5573a8c2021-06-04 08:55:10 -06004298 display_mode_map[*pMode] = std::make_shared<DISPLAY_MODE_STATE>(*pMode, physicalDevice);
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004299}
4300
locke-lunargd556cc32019-09-17 01:21:23 -06004301void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
4302 // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
4303 for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004304 auto semaphore_state = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
4305 if (semaphore_state) {
4306 semaphore_state->signaler.first = VK_NULL_HANDLE;
4307 semaphore_state->signaled = false;
locke-lunargd556cc32019-09-17 01:21:23 -06004308 }
4309 }
4310
4311 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
4312 // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
4313 // confused itself just as much.
4314 auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
4315 if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
4316 // Mark the image as having been released to the WSI
4317 auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004318 if (swapchain_data) {
4319 swapchain_data->PresentImage(pPresentInfo->pImageIndices[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06004320 }
4321 }
4322 // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
4323 // its semaphore waits) /never/ participate in any completion proof.
4324}
4325
4326void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
4327 const VkSwapchainCreateInfoKHR *pCreateInfos,
4328 const VkAllocationCallbacks *pAllocator,
4329 VkSwapchainKHR *pSwapchains, VkResult result) {
4330 if (pCreateInfos) {
4331 for (uint32_t i = 0; i < swapchainCount; i++) {
4332 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
4333 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
4334 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
4335 }
4336 }
4337}
4338
4339void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4340 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004341 auto fence_state = GetFenceState(fence);
4342 if (fence_state && fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004343 // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
4344 // import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004345 fence_state->state = FENCE_INFLIGHT;
4346 fence_state->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
locke-lunargd556cc32019-09-17 01:21:23 -06004347 }
4348
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004349 auto semaphore_state = GetSemaphoreState(semaphore);
4350 if (semaphore_state && semaphore_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004351 // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
4352 // temporary import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004353 semaphore_state->signaled = true;
4354 semaphore_state->signaler.first = VK_NULL_HANDLE;
locke-lunargd556cc32019-09-17 01:21:23 -06004355 }
4356
4357 // Mark the image as acquired.
4358 auto swapchain_data = GetSwapchainState(swapchain);
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004359 if (swapchain_data) {
4360 swapchain_data->AcquireImage(*pImageIndex);
locke-lunargd556cc32019-09-17 01:21:23 -06004361 }
4362}
4363
4364void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4365 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
4366 VkResult result) {
4367 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4368 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
4369}
4370
4371void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
4372 uint32_t *pImageIndex, VkResult result) {
4373 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4374 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
4375 pAcquireInfo->fence, pImageIndex);
4376}
4377
4378void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
4379 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
4380    if ((nullptr != pPhysicalDevices) && (result == VK_SUCCESS || result == VK_INCOMPLETE)) {
4381 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
4382 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
4383 phys_device_state.phys_device = pPhysicalDevices[i];
4384 // Init actual features for each physical device
4385 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
4386 }
4387 }
4388}
4389
4390// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
4391static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004392 VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004393 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
4394
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004395 if (pQueueFamilyProperties) { // Save queue family properties
locke-lunargd556cc32019-09-17 01:21:23 -06004396 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
4397 for (uint32_t i = 0; i < count; ++i) {
4398 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
4399 }
4400 }
4401}
4402
4403void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
4404 uint32_t *pQueueFamilyPropertyCount,
4405 VkQueueFamilyProperties *pQueueFamilyProperties) {
4406 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4407 assert(physical_device_state);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004408 VkQueueFamilyProperties2 *pqfp = nullptr;
4409 std::vector<VkQueueFamilyProperties2> qfp;
locke-lunargd556cc32019-09-17 01:21:23 -06004410 qfp.resize(*pQueueFamilyPropertyCount);
4411 if (pQueueFamilyProperties) {
4412 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004413 qfp[i] = LvlInitStruct<VkQueueFamilyProperties2>();
locke-lunargd556cc32019-09-17 01:21:23 -06004414 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
4415 }
4416 pqfp = qfp.data();
4417 }
4418 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
4419}
4420
4421void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004422 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004423 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4424 assert(physical_device_state);
4425 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4426 pQueueFamilyProperties);
4427}
4428
4429void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004430 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004431 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4432 assert(physical_device_state);
4433 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4434 pQueueFamilyProperties);
4435}

4436void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
4437 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004438 if (!surface) return;
4439 auto surface_state = GetSurfaceState(surface);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004440 surface_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06004441 surface_map.erase(surface);
4442}
4443
4444void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004445 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06004446}
4447
4448void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
4449 const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
4450 const VkAllocationCallbacks *pAllocator,
4451 VkSurfaceKHR *pSurface, VkResult result) {
4452 if (VK_SUCCESS != result) return;
4453 RecordVulkanSurface(pSurface);
4454}
4455
4456#ifdef VK_USE_PLATFORM_ANDROID_KHR
4457void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
4458 const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
4459 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4460 VkResult result) {
4461 if (VK_SUCCESS != result) return;
4462 RecordVulkanSurface(pSurface);
4463}
4464#endif // VK_USE_PLATFORM_ANDROID_KHR
4465
4466#ifdef VK_USE_PLATFORM_IOS_MVK
4467void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
4468 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4469 VkResult result) {
4470 if (VK_SUCCESS != result) return;
4471 RecordVulkanSurface(pSurface);
4472}
4473#endif // VK_USE_PLATFORM_IOS_MVK
4474
4475#ifdef VK_USE_PLATFORM_MACOS_MVK
4476void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
4477 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
4478 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4479 VkResult result) {
4480 if (VK_SUCCESS != result) return;
4481 RecordVulkanSurface(pSurface);
4482}
4483#endif // VK_USE_PLATFORM_MACOS_MVK
4484
Jeremy Kniagerf33a67c2019-12-09 09:44:39 -07004485#ifdef VK_USE_PLATFORM_METAL_EXT
4486void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
4487 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
4488 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4489 VkResult result) {
4490 if (VK_SUCCESS != result) return;
4491 RecordVulkanSurface(pSurface);
4492}
4493#endif // VK_USE_PLATFORM_METAL_EXT
4494
locke-lunargd556cc32019-09-17 01:21:23 -06004495#ifdef VK_USE_PLATFORM_WAYLAND_KHR
4496void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
4497 const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
4498 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4499 VkResult result) {
4500 if (VK_SUCCESS != result) return;
4501 RecordVulkanSurface(pSurface);
4502}
4503#endif // VK_USE_PLATFORM_WAYLAND_KHR
4504
4505#ifdef VK_USE_PLATFORM_WIN32_KHR
4506void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
4507 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
4508 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4509 VkResult result) {
4510 if (VK_SUCCESS != result) return;
4511 RecordVulkanSurface(pSurface);
4512}
4513#endif // VK_USE_PLATFORM_WIN32_KHR
4514
4515#ifdef VK_USE_PLATFORM_XCB_KHR
4516void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
4517 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4518 VkResult result) {
4519 if (VK_SUCCESS != result) return;
4520 RecordVulkanSurface(pSurface);
4521}
4522#endif // VK_USE_PLATFORM_XCB_KHR
4523
4524#ifdef VK_USE_PLATFORM_XLIB_KHR
4525void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
4526 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4527 VkResult result) {
4528 if (VK_SUCCESS != result) return;
4529 RecordVulkanSurface(pSurface);
4530}
4531#endif // VK_USE_PLATFORM_XLIB_KHR
4532
Niklas Haas8b84af12020-04-19 22:20:11 +02004533void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
4534 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
4535 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4536 VkResult result) {
4537 if (VK_SUCCESS != result) return;
4538 RecordVulkanSurface(pSurface);
4539}
4540
Cort23cf2282019-09-20 18:58:18 +02004541void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004542 VkPhysicalDeviceFeatures *pFeatures) {
4543 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Yilong Li358152a2020-07-08 02:16:45 -07004544 // Reset the features2 safe struct before setting up the features field.
4545 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02004546 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02004547}
4548
4549void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004550 VkPhysicalDeviceFeatures2 *pFeatures) {
4551 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02004552 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02004553}
4554
4555void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004556 VkPhysicalDeviceFeatures2 *pFeatures) {
4557 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02004558 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02004559}
4560
locke-lunargd556cc32019-09-17 01:21:23 -06004561void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
4562 VkSurfaceKHR surface,
4563 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
4564 VkResult result) {
4565 if (VK_SUCCESS != result) return;
4566 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004567 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004568
4569 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4570 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004571}
4572
4573void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
4574 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4575 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
4576 if (VK_SUCCESS != result) return;
4577 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004578 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004579
4580 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4581 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004582}
4583
4584void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
4585 VkSurfaceKHR surface,
4586 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
4587 VkResult result) {
4588 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004589 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
4590 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
4591 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
4592 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
4593 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
4594 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
4595 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
4596 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
4597 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
4598 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004599
4600 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4601 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004602}
4603
4604void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
4605 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
4606 VkBool32 *pSupported, VkResult result) {
4607 if (VK_SUCCESS != result) return;
4608 auto surface_state = GetSurfaceState(surface);
4609 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
4610}
4611
4612void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
4613 VkSurfaceKHR surface,
4614 uint32_t *pPresentModeCount,
4615 VkPresentModeKHR *pPresentModes,
4616 VkResult result) {
4617 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4618
4619 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
4620 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004621
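    // Typical Vulkan two-call enumeration: a first call with pPresentModes == NULL reports the count and sizes the
    // cached vector; a later call with a non-null array fills in the cached modes.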
4622 if (*pPresentModeCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004623 if (*pPresentModeCount > physical_device_state->present_modes.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004624 physical_device_state->present_modes.resize(*pPresentModeCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004625 }
locke-lunargd556cc32019-09-17 01:21:23 -06004626 }
4627 if (pPresentModes) {
locke-lunargd556cc32019-09-17 01:21:23 -06004628 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
4629 physical_device_state->present_modes[i] = pPresentModes[i];
4630 }
4631 }
4632}
4633
4634void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
4635 uint32_t *pSurfaceFormatCount,
4636 VkSurfaceFormatKHR *pSurfaceFormats,
4637 VkResult result) {
4638 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4639
4640 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004641
4642 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004643 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004644 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004645 }
locke-lunargd556cc32019-09-17 01:21:23 -06004646 }
4647 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004648 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
4649 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
4650 }
4651 }
4652}
4653
4654void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
4655 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4656 uint32_t *pSurfaceFormatCount,
4657 VkSurfaceFormat2KHR *pSurfaceFormats,
4658 VkResult result) {
4659 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4660
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004661 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004662 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004663 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
4664 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
4665 }
locke-lunargd556cc32019-09-17 01:21:23 -06004666 }
4667 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004668 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004669 physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
locke-lunargd556cc32019-09-17 01:21:23 -06004670 }
4671 }
4672}
4673
4674void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4675 const VkDebugUtilsLabelEXT *pLabelInfo) {
4676 BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4677}
4678
4679void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
4680 EndCmdDebugUtilsLabel(report_data, commandBuffer);
4681}
4682
4683void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4684 const VkDebugUtilsLabelEXT *pLabelInfo) {
4685 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4686
4687 // Squirrel away an easily accessible copy.
4688 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4689 cb_state->debug_label = LoggingLabel(pLabelInfo);
4690}
4691
4692void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004693 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004694 if (NULL != pPhysicalDeviceGroupProperties) {
4695 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
4696 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
4697 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
4698 auto &phys_device_state = physical_device_map[cur_phys_dev];
4699 phys_device_state.phys_device = cur_phys_dev;
4700 // Init actual features for each physical device
4701 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
4702 }
4703 }
4704 }
4705}
4706
4707void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004708 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004709 VkResult result) {
4710 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4711 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4712}
4713
4714void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004715 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004716 VkResult result) {
4717 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4718 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4719}
4720
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004721void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
4722 uint32_t queueFamilyIndex,
4723 uint32_t *pCounterCount,
4724 VkPerformanceCounterKHR *pCounters) {
4725 if (NULL == pCounters) return;
4726
4727 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4728 assert(physical_device_state);
4729
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004730 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queue_family_counters(new QUEUE_FAMILY_PERF_COUNTERS());
4731 queue_family_counters->counters.resize(*pCounterCount);
4732 for (uint32_t i = 0; i < *pCounterCount; i++) queue_family_counters->counters[i] = pCounters[i];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004733
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004734 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queue_family_counters);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004735}
4736
4737void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
4738 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
4739 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
4740 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4741 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
4742}
4743
4744void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
4745 VkResult result) {
4746 if (result == VK_SUCCESS) performance_lock_acquired = true;
4747}
4748
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004749void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
4750 performance_lock_acquired = false;
4751 for (auto &cmd_buffer : commandBufferMap) {
4752 cmd_buffer.second->performance_lock_released = true;
4753 }
4754}
4755
locke-lunargd556cc32019-09-17 01:21:23 -06004756void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004757 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004758 const VkAllocationCallbacks *pAllocator) {
4759 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004760 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4761 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004762 desc_template_map.erase(descriptorUpdateTemplate);
4763}
4764
4765void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004766 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004767 const VkAllocationCallbacks *pAllocator) {
4768 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004769 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4770 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004771 desc_template_map.erase(descriptorUpdateTemplate);
4772}
4773
Mike Schuchardt2df08912020-12-15 16:28:09 -08004774void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4775 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) {
locke-lunargd556cc32019-09-17 01:21:23 -06004776 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004777 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004778 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
4779}
4780
Mike Schuchardt2df08912020-12-15 16:28:09 -08004781void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device,
4782 const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4783 const VkAllocationCallbacks *pAllocator,
4784 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate,
4785 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004786 if (VK_SUCCESS != result) return;
4787 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4788}
4789
4790void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004791 VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
4792 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004793 if (VK_SUCCESS != result) return;
4794 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4795}
4796
4797void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004798 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004799 const void *pData) {
4800 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
4801 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
4802 assert(0);
4803 } else {
4804 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
4805 // TODO: Record template push descriptor updates
4806 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
4807 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
4808 }
4809 }
4810}
4811
4812void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
4813 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4814 const void *pData) {
4815 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
4816}
4817
4818void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004819 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004820 const void *pData) {
4821 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
4822}
4823
Mike Schuchardt2df08912020-12-15 16:28:09 -08004824void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
4825 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4826 VkPipelineLayout layout, uint32_t set,
4827 const void *pData) {
locke-lunargd556cc32019-09-17 01:21:23 -06004828 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4829
4830 const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4831 if (template_state) {
4832 auto layout_data = GetPipelineLayout(layout);
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06004833 auto dsl = layout_data ? layout_data->GetDsl(set) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004834 const auto &template_ci = template_state->create_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004835 if (dsl && !dsl->Destroyed()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004836 // Decode the template into a set of write updates
4837 cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
4838 dsl->GetDescriptorSetLayout());
4839 RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
4840 static_cast<uint32_t>(decoded_template.desc_writes.size()),
4841 decoded_template.desc_writes.data());
4842 }
4843 }
4844}
4845
4846void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
4847 uint32_t *pPropertyCount, void *pProperties) {
4848 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4849 if (*pPropertyCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06004850 physical_device_state->display_plane_property_count = *pPropertyCount;
4851 }
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004852 if (*pPropertyCount || pProperties) {
4853 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004854 }
4855}
4856
4857void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
4858 uint32_t *pPropertyCount,
4859 VkDisplayPlanePropertiesKHR *pProperties,
4860 VkResult result) {
4861 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4862 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
4863}
4864
4865void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
4866 uint32_t *pPropertyCount,
4867 VkDisplayPlaneProperties2KHR *pProperties,
4868 VkResult result) {
4869 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4870 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
4871}
4872
4873void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4874 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
4875 QueryObject query_obj = {queryPool, query, index};
4876 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4877 RecordCmdBeginQuery(cb_state, query_obj);
4878}
4879
4880void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4881 uint32_t query, uint32_t index) {
4882 QueryObject query_obj = {queryPool, query, index};
4883 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4884 RecordCmdEndQuery(cb_state, query_obj);
4885}
4886
4887void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
4888 VkSamplerYcbcrConversion ycbcr_conversion) {
Jeremy Gebbenf4fb2a02021-07-08 09:57:46 -06004889 VkFormatFeatureFlags format_features = 0;
4890
4891 if (create_info->format != VK_FORMAT_UNDEFINED) {
4892 format_features = GetPotentialFormatFeatures(create_info->format);
4893 } else if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
4894 // If format is VK_FORMAT_UNDEFINED, format_features will be set by external AHB features
4895 format_features = GetExternalFormatFeaturesANDROID(create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004896 }
Jeremy Gebbenf4fb2a02021-07-08 09:57:46 -06004897
4898 samplerYcbcrConversionMap[ycbcr_conversion] =
4899 std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>(ycbcr_conversion, create_info, format_features);
locke-lunargd556cc32019-09-17 01:21:23 -06004900}
4901
4902void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
4903 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
4904 const VkAllocationCallbacks *pAllocator,
4905 VkSamplerYcbcrConversion *pYcbcrConversion,
4906 VkResult result) {
4907 if (VK_SUCCESS != result) return;
4908 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
4909}
4910
4911void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
4912 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
4913 const VkAllocationCallbacks *pAllocator,
4914 VkSamplerYcbcrConversion *pYcbcrConversion,
4915 VkResult result) {
4916 if (VK_SUCCESS != result) return;
4917 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
4918}
4919
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004920void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004921 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004922 ycbcr_state->Destroy();
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004923 samplerYcbcrConversionMap.erase(ycbcr_conversion);
4924}
4925
locke-lunargd556cc32019-09-17 01:21:23 -06004926void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
4927 const VkAllocationCallbacks *pAllocator) {
4928 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004929 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06004930}
4931
4932void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
4933 VkSamplerYcbcrConversion ycbcrConversion,
4934 const VkAllocationCallbacks *pAllocator) {
4935 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004936 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06004937}
4938
Tony-LunarG977448c2019-12-02 14:52:02 -07004939void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
4940 uint32_t queryCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06004941 // Do nothing if the feature is not enabled.
Piers Daniell41b8c5d2020-01-10 15:42:00 -07004942 if (!enabled_features.core12.hostQueryReset) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004943
4944 // Do nothing if the query pool has been destroyed.
4945 auto query_pool_state = GetQueryPoolState(queryPool);
4946 if (!query_pool_state) return;
4947
4948 // Reset the state of existing entries.
4949 QueryObject query_obj{queryPool, 0};
4950 const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
4951 for (uint32_t i = 0; i < max_query_count; ++i) {
4952 query_obj.query = firstQuery + i;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004953 queryToStateMap[query_obj] = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004954 if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004955 for (uint32_t pass_index = 0; pass_index < query_pool_state->n_performance_passes; pass_index++) {
4956 query_obj.perf_pass = pass_index;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004957 queryToStateMap[query_obj] = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004958 }
4959 }
locke-lunargd556cc32019-09-17 01:21:23 -06004960 }
4961}
4962
Tony-LunarG977448c2019-12-02 14:52:02 -07004963void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
4964 uint32_t queryCount) {
4965 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
4966}
4967
4968void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
4969 uint32_t queryCount) {
4970 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
4971}
4972
locke-lunargd556cc32019-09-17 01:21:23 -06004973void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
4974 const TEMPLATE_STATE *template_state, const void *pData) {
4975 // Translate the templated update into a normal update for validation...
4976 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
4977 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
4978 decoded_update.desc_writes.data(), 0, NULL);
4979}
4980
4981// Update the common AllocateDescriptorSetsData
4982void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05004983 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06004984 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05004985 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06004986 if (layout) {
4987 ds_data->layout_nodes[i] = layout;
4988 // Count total descriptors required per type
4989 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
4990 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004991 uint32_t type_index = static_cast<uint32_t>(binding_layout->descriptorType);
4992 ds_data->required_descriptors_by_type[type_index] += binding_layout->descriptorCount;
locke-lunargd556cc32019-09-17 01:21:23 -06004993 }
4994 }
4995 // Any unknown layouts will be flagged as errors during the ValidateAllocateDescriptorSets() call
4996 }
4997}
4998
4999// Decrement allocated sets from the pool and insert new sets into set_map
5000void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
5001 const VkDescriptorSet *descriptor_sets,
5002 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
5003 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
5004 // Account for sets and individual descriptors allocated from pool
5005 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
5006 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
5007 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
5008 }
5009
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07005010 const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(p_alloc_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06005011 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
5012
5013 // Create tracking object for each descriptor set; insert into global map and the pool's set.
5014 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
5015 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
5016
Jeff Bolz41a1ced2019-10-11 11:40:49 -05005017 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07005018 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06005019 pool_state->sets.insert(new_ds.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005020 setMap[descriptor_sets[i]] = std::move(new_ds);
5021 }
5022}
5023
5024// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
Jeremy Kniager05631e72020-06-08 14:21:35 -06005025void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
locke-lunarg540b2252020-08-03 13:23:36 -06005026 VkPipelineBindPoint bind_point, const char *function) {
5027 UpdateDrawState(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005028 cb_state->hasDispatchCmd = true;
5029}
5030
locke-lunargd556cc32019-09-17 01:21:23 -06005031// Generic function to handle state update for all CmdDraw* type functions
locke-lunarg540b2252020-08-03 13:23:36 -06005032void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
5033 const char *function) {
5034 UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005035 cb_state->hasDrawCmd = true;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005036
5037 // Update the consumed viewport/scissor count.
5038 uint32_t &used = cb_state->usedViewportScissorCount;
5039 used = std::max(used, cb_state->pipelineStaticViewportCount);
5040 used = std::max(used, cb_state->pipelineStaticScissorCount);
5041 cb_state->usedDynamicViewportCount |= !!(cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET); // !! silences MSVC warn
5042 cb_state->usedDynamicScissorCount |= !!(cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET);
locke-lunargd556cc32019-09-17 01:21:23 -06005043}
5044
5045void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5046 uint32_t firstVertex, uint32_t firstInstance) {
5047 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005048 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
locke-lunargd556cc32019-09-17 01:21:23 -06005049}
5050
Tony-LunarG745150c2021-07-02 15:07:31 -06005051void ValidationStateTracker::PostCallRecordCmdDrawMultiEXT(VkCommandBuffer commandBuffer, uint32_t drawCount,
5052 const VkMultiDrawInfoEXT *pVertexInfo, uint32_t instanceCount,
5053 uint32_t firstInstance, uint32_t stride) {
5054 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5055 UpdateStateCmdDrawType(cb_state, CMD_DRAWMULTIEXT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMultiEXT()");
5056}
5057
locke-lunargd556cc32019-09-17 01:21:23 -06005058void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5059 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5060 uint32_t firstInstance) {
5061 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005062 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
locke-lunargd556cc32019-09-17 01:21:23 -06005063}
5064
Tony-LunarG745150c2021-07-02 15:07:31 -06005065void ValidationStateTracker::PostCallRecordCmdDrawMultiIndexedEXT(VkCommandBuffer commandBuffer, uint32_t drawCount,
5066 const VkMultiDrawIndexedInfoEXT *pIndexInfo,
5067 uint32_t instanceCount, uint32_t firstInstance, uint32_t stride,
5068 const int32_t *pVertexOffset) {
5069 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5070 UpdateStateCmdDrawType(cb_state, CMD_DRAWMULTIINDEXEDEXT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMultiIndexedEXT()");
5071}
5072
locke-lunargd556cc32019-09-17 01:21:23 -06005073void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5074 uint32_t count, uint32_t stride) {
5075 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5076 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005077 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005078 if (!disabled[command_buffer_state]) {
5079 cb_state->AddChild(buffer_state);
5080 }
locke-lunargd556cc32019-09-17 01:21:23 -06005081}
5082
5083void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5084 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5085 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5086 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005087 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005088 if (!disabled[command_buffer_state]) {
5089 cb_state->AddChild(buffer_state);
5090 }
locke-lunargd556cc32019-09-17 01:21:23 -06005091}
5092
5093void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5094 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005095 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
locke-lunargd556cc32019-09-17 01:21:23 -06005096}
5097
5098void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5099 VkDeviceSize offset) {
5100 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005101 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005102 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005103 if (!disabled[command_buffer_state]) {
5104 cb_state->AddChild(buffer_state);
5105 }
locke-lunargd556cc32019-09-17 01:21:23 -06005106}
5107
Tony-LunarG977448c2019-12-02 14:52:02 -07005108void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5109 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
locke-lunarg540b2252020-08-03 13:23:36 -06005110 uint32_t stride, const char *function) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005111 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5112 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5113 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005114 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005115 if (!disabled[command_buffer_state]) {
5116 cb_state->AddChild(buffer_state);
5117 cb_state->AddChild(count_buffer_state);
5118 }
Tony-LunarG977448c2019-12-02 14:52:02 -07005119}
5120
locke-lunargd556cc32019-09-17 01:21:23 -06005121void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5122 VkDeviceSize offset, VkBuffer countBuffer,
5123 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5124 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005125 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5126 "vkCmdDrawIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005127}
5128
5129void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5130 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5131 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005132 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5133 "vkCmdDrawIndirectCount()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005134}
5135
5136void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5137 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
locke-lunarg540b2252020-08-03 13:23:36 -06005138 uint32_t maxDrawCount, uint32_t stride, const char *function) {
locke-lunargd556cc32019-09-17 01:21:23 -06005139 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5140 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5141 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005142 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005143 if (!disabled[command_buffer_state]) {
5144 cb_state->AddChild(buffer_state);
5145 cb_state->AddChild(count_buffer_state);
5146 }
locke-lunargd556cc32019-09-17 01:21:23 -06005147}
5148
5149void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5150 VkDeviceSize offset, VkBuffer countBuffer,
5151 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5152 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005153 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5154 "vkCmdDrawIndexedIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005155}
5156
5157void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
5158 VkDeviceSize offset, VkBuffer countBuffer,
5159 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5160 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005161 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5162 "vkCmdDrawIndexedIndirectCount()");
locke-lunargd556cc32019-09-17 01:21:23 -06005163}
5164
5165void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5166 uint32_t firstTask) {
5167 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005168 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005169}
5170
5171void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5172 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5173 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005174 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5175 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005176 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005177 if (!disabled[command_buffer_state] && buffer_state) {
5178 cb_state->AddChild(buffer_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005179 }
5180}
5181
5182void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5183 VkDeviceSize offset, VkBuffer countBuffer,
5184 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5185 uint32_t stride) {
5186 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5187 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5188 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005189 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5190 "vkCmdDrawMeshTasksIndirectCountNV()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005191 if (!disabled[command_buffer_state]) {
5192 if (buffer_state) {
5193 cb_state->AddChild(buffer_state);
5194 }
5195 if (count_buffer_state) {
5196 cb_state->AddChild(count_buffer_state);
5197 }
locke-lunargd556cc32019-09-17 01:21:23 -06005198 }
5199}
5200
Jeremy Gebben252f60c2021-07-15 14:54:30 -06005201void ValidationStateTracker::PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
5202 VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
5203 VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
5204 VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
5205 VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
5206 VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
5207 uint32_t width, uint32_t height, uint32_t depth) {
5208 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5209 UpdateStateCmdDrawDispatchType(cb_state, CMD_TRACERAYSNV, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, "vkCmdTraceRaysNV()");
5210 cb_state->hasTraceRaysCmd = true;
5211}
5212
5214void ValidationStateTracker::PostCallRecordCmdTraceRaysKHR(VkCommandBuffer commandBuffer,
5215 const VkStridedDeviceAddressRegionKHR *pRaygenShaderBindingTable,
5216 const VkStridedDeviceAddressRegionKHR *pMissShaderBindingTable,
5217 const VkStridedDeviceAddressRegionKHR *pHitShaderBindingTable,
5218 const VkStridedDeviceAddressRegionKHR *pCallableShaderBindingTable, uint32_t width,
5219 uint32_t height, uint32_t depth) {
5220 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5221 UpdateStateCmdDrawDispatchType(cb_state, CMD_TRACERAYSKHR, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, "vkCmdTraceRaysKHR()");
5222 cb_state->hasTraceRaysCmd = true;
5223}
5224
5225void ValidationStateTracker::PostCallRecordCmdTraceRaysIndirectKHR(VkCommandBuffer commandBuffer,
5226 const VkStridedDeviceAddressRegionKHR *pRaygenShaderBindingTable,
5227 const VkStridedDeviceAddressRegionKHR *pMissShaderBindingTable,
5228 const VkStridedDeviceAddressRegionKHR *pHitShaderBindingTable,
5229 const VkStridedDeviceAddressRegionKHR *pCallableShaderBindingTable,
5230 VkDeviceAddress indirectDeviceAddress) {
5231 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5232 UpdateStateCmdDrawDispatchType(cb_state, CMD_TRACERAYSINDIRECTKHR, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
5233 "vkCmdTraceRaysIndirectKHR()");
5234 cb_state->hasTraceRaysCmd = true;
5235}
5236
locke-lunargd556cc32019-09-17 01:21:23 -06005237void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
5238 const VkAllocationCallbacks *pAllocator,
5239 VkShaderModule *pShaderModule, VkResult result,
5240 void *csm_state_data) {
5241 if (VK_SUCCESS != result) return;
5242 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
5243
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06005244 spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
locke-lunargd556cc32019-09-17 01:21:23 -06005245 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005246 auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
5247 csm_state->unique_shader_id)
5248 : std::make_shared<SHADER_MODULE_STATE>();
sfricke-samsung962cad92021-04-13 00:46:29 -07005249 new_shader_module->SetPushConstantUsedInShader();
locke-lunargd556cc32019-09-17 01:21:23 -06005250 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
5251}
5252
5253void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06005254 PipelineStageState *stage_state) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005255 // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
locke-lunargde3f0fa2020-09-10 11:55:31 -06005256 stage_state->entry_point_name = pStage->pName;
5257 stage_state->shader_state = GetShared<SHADER_MODULE_STATE>(pStage->module);
5258 auto module = stage_state->shader_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06005259 if (!module->has_valid_spirv) return;
5260
5261 // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
sfricke-samsung962cad92021-04-13 00:46:29 -07005262 auto entrypoint = module->FindEntrypoint(pStage->pName, pStage->stage);
locke-lunargd556cc32019-09-17 01:21:23 -06005263 if (entrypoint == module->end()) return;
5264
locke-lunarg654e3692020-06-04 17:19:15 -06005265 stage_state->stage_flag = pStage->stage;
5266
locke-lunargd556cc32019-09-17 01:21:23 -06005267 // Mark accessible ids
sfricke-samsung962cad92021-04-13 00:46:29 -07005268 stage_state->accessible_ids = module->MarkAccessibleIds(entrypoint);
5269 module->ProcessExecutionModes(entrypoint, pipeline);
locke-lunargd556cc32019-09-17 01:21:23 -06005270
sfricke-samsung962cad92021-04-13 00:46:29 -07005271 stage_state->descriptor_uses = module->CollectInterfaceByDescriptorSlot(
5272 stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
locke-lunargd556cc32019-09-17 01:21:23 -06005273 // Capture descriptor uses for the pipeline
locke-lunarg36045992020-08-20 16:54:37 -06005274 for (const auto &use : stage_state->descriptor_uses) {
locke-lunargd556cc32019-09-17 01:21:23 -06005275 // While validating shaders capture which slots are used by the pipeline
John Zulauf649edd52019-10-02 14:39:41 -06005276 const uint32_t slot = use.first.first;
locke-lunarg351c9d82020-10-23 14:43:21 -06005277 pipeline->active_slots[slot][use.first.second].is_writable |= use.second.is_writable;
locke-lunarg36045992020-08-20 16:54:37 -06005278 auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
sfricke-samsung962cad92021-04-13 00:46:29 -07005279 reqs = descriptor_req(reqs | module->DescriptorTypeToReqs(use.second.type_id));
locke-lunarg25b6c352020-08-06 17:44:18 -06005280 if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
locke-lunarg12d20992020-09-21 12:46:49 -06005281 if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
locke-lunargae2a43c2020-09-22 17:21:57 -06005282 if (use.second.is_sampler_bias_offset) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET);
locke-lunarg12d20992020-09-21 12:46:49 -06005283
John Zulauf649edd52019-10-02 14:39:41 -06005284 pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
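        // Record, for each image use in this binding, the set of samplers it is combined with; the paired state
        // pointer is left null here at pipeline-creation time.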
locke-lunarg36045992020-08-20 16:54:37 -06005285 if (use.second.samplers_used_by_image.size()) {
locke-lunarg654a9052020-10-13 16:28:42 -06005286 auto &samplers_used_by_image = pipeline->active_slots[slot][use.first.second].samplers_used_by_image;
5287 if (use.second.samplers_used_by_image.size() > samplers_used_by_image.size()) {
5288 samplers_used_by_image.resize(use.second.samplers_used_by_image.size());
5289 }
locke-lunarg654a9052020-10-13 16:28:42 -06005290 uint32_t image_index = 0;
5291 for (const auto &samplers : use.second.samplers_used_by_image) {
5292 for (const auto &sampler : samplers) {
locke-lunargb8be8222020-10-20 00:34:37 -06005293 samplers_used_by_image[image_index].emplace(sampler, nullptr);
locke-lunarg654a9052020-10-13 16:28:42 -06005294 }
5295 ++image_index;
5296 }
locke-lunarg36045992020-08-20 16:54:37 -06005297 }
locke-lunargd556cc32019-09-17 01:21:23 -06005298 }
locke-lunarg78486832020-09-09 19:39:42 -06005299
locke-lunarg96dc9632020-06-10 17:22:18 -06005300 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
sfricke-samsung962cad92021-04-13 00:46:29 -07005301 pipeline->fragmentShader_writable_output_location_list = module->CollectWritableOutputLocationinFS(*pStage);
locke-lunarg96dc9632020-06-10 17:22:18 -06005302 }
locke-lunargd556cc32019-09-17 01:21:23 -06005303}
5304
sfricke-samsung70ad9ce2021-04-04 00:53:54 -07005305// Discussed in detail in https://github.com/KhronosGroup/Vulkan-Docs/issues/1081
5306// Internal discussion and CTS tests were written to prove that this does not need to be called after an incompatible vkCmdBindPipeline:
5307// "Binding a pipeline with a layout that is not compatible with the push constant layout does not disturb the push constant values"
5308//
5309// vkCmdBindDescriptorSets has nothing to do with push constants, so this does not need to be called after it either.
5310//
5311// Part of this assumes that at draw/dispatch/traceRays/etc. time the app will have a properly compatible layout bound, or other VUs will be triggered.
locke-lunargd556cc32019-09-17 01:21:23 -06005312void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
5313 if (cb_state == nullptr) {
5314 return;
5315 }
5316
5317 const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
5318 if (pipeline_layout_state == nullptr) {
5319 return;
5320 }
5321
5322 if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
5323 cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
5324 cb_state->push_constant_data.clear();
locke-lunargde3f0fa2020-09-10 11:55:31 -06005325 cb_state->push_constant_data_update.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06005326 uint32_t size_needed = 0;
John Zulauf79f06582021-02-27 18:38:39 -07005327 for (const auto &push_constant_range : *cb_state->push_constant_data_ranges) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06005328 auto size = push_constant_range.offset + push_constant_range.size;
5329 size_needed = std::max(size_needed, size);
5330
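            // For every shader stage bit in this range, size that stage's shadow byte vector: bytes below the range's
            // offset are marked PC_Byte_Not_Set and bytes covered by the range are marked PC_Byte_Not_Updated.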
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005331 auto stage_flags = push_constant_range.stageFlags;
locke-lunargde3f0fa2020-09-10 11:55:31 -06005332 uint32_t bit_shift = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005333 while (stage_flags) {
5334 if (stage_flags & 1) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06005335 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
5336 const auto it = cb_state->push_constant_data_update.find(flag);
5337
5338 if (it != cb_state->push_constant_data_update.end()) {
5339 if (it->second.size() < push_constant_range.offset) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005340 it->second.resize(push_constant_range.offset, PC_Byte_Not_Set);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005341 }
5342 if (it->second.size() < size) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005343 it->second.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005344 }
5345 } else {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005346 std::vector<uint8_t> bytes;
5347 bytes.resize(push_constant_range.offset, PC_Byte_Not_Set);
5348 bytes.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005349 cb_state->push_constant_data_update[flag] = bytes;
5350 }
5351 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005352 stage_flags = stage_flags >> 1;
locke-lunargde3f0fa2020-09-10 11:55:31 -06005353 ++bit_shift;
5354 }
locke-lunargd556cc32019-09-17 01:21:23 -06005355 }
5356 cb_state->push_constant_data.resize(size_needed, 0);
5357 }
5358}
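
// Illustrative sketch of the contract above (hypothetical command buffer / layout handles, not part of the tracker):
//   ResetCommandBufferPushConstantDataIfIncompatible(cb, layoutA);  // tracked ranges differ -> shadow storage is rebuilt;
//                                                                   // per stage, bytes below a range start as PC_Byte_Not_Set
//                                                                   // and bytes inside it as PC_Byte_Not_Updated
//   ResetCommandBufferPushConstantDataIfIncompatible(cb, layoutA);  // same ranges -> nothing is cleared; previously
//                                                                   // recorded push constant bytes remain tracked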
John Zulauf22b0fbe2019-10-15 06:26:16 -06005359
5360void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
5361 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
5362 VkResult result) {
5363 if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
5364 auto swapchain_state = GetSwapchainState(swapchain);
5365
5366 if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);
5367
5368 if (pSwapchainImages) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005369 for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
John Zulauf29d00532021-03-04 13:28:54 -07005370 SWAPCHAIN_IMAGE &swapchain_image = swapchain_state->images[i];
John Zulauffaa7a522021-03-05 12:22:45 -07005371 if (swapchain_image.image_state) continue; // Already retrieved this.
John Zulauf22b0fbe2019-10-15 06:26:16 -06005372
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06005373 auto format_features =
5374 GetImageFormatFeatures(physical_device, device, pSwapchainImages[i], swapchain_state->image_create_info.format,
5375 swapchain_state->image_create_info.tiling);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005376
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06005377 auto image_state = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &swapchain_state->image_create_info,
5378 swapchain, i, format_features);
John Zulauf29d00532021-03-04 13:28:54 -07005379
5380 if (swapchain_image.bound_images.empty()) {
5381 // First time "bind" allocates
5382 image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
5383 } else {
5384 // All others reuse
5385 image_state->swapchain_fake_address = (*swapchain_image.bound_images.cbegin())->swapchain_fake_address;
5386 // Since there are others, need to update the aliasing information
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06005387 for (auto other_image : swapchain_image.bound_images) {
5388 image_state->AddAliasingImage(other_image);
5389 }
John Zulauf29d00532021-03-04 13:28:54 -07005390 }
5391
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06005392 swapchain_image.image_state = image_state.get();
5393 swapchain_image.bound_images.emplace(image_state.get());
Petr Kraus44f1c482020-04-25 20:09:25 +02005394
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06005395 image_state->AddParent(swapchain_state);
5396 imageMap[pSwapchainImages[i]] = std::move(image_state);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005397 }
5398 }
5399
5400 if (*pSwapchainImageCount) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005401 swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
5402 }
5403}
sourav parmar35e7a002020-06-09 17:58:44 -07005404
sourav parmar35e7a002020-06-09 17:58:44 -07005405void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
5406 const VkCopyAccelerationStructureInfoKHR *pInfo) {
5407 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5408 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07005409 ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfo->src);
5410 ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfo->dst);
sourav parmar35e7a002020-06-09 17:58:44 -07005411 if (dst_as_state != nullptr && src_as_state != nullptr) {
5412 dst_as_state->built = true;
5413 dst_as_state->build_info_khr = src_as_state->build_info_khr;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005414 if (!disabled[command_buffer_state]) {
5415 cb_state->AddChild(dst_as_state);
5416 cb_state->AddChild(src_as_state);
5417 }
sourav parmar35e7a002020-06-09 17:58:44 -07005418 }
5419 }
5420}
Piers Daniell39842ee2020-07-10 16:42:33 -06005421
5422void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
5423 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5424 cb_state->status |= CBSTATUS_CULL_MODE_SET;
5425 cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
5426}
5427
5428void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
5429 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5430 cb_state->status |= CBSTATUS_FRONT_FACE_SET;
5431 cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
5432}
5433
5434void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
5435 VkPrimitiveTopology primitiveTopology) {
5436 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5437 cb_state->primitiveTopology = primitiveTopology;
5438 cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5439 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5440}
5441
5442void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
5443 const VkViewport *pViewports) {
5444 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
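    // Mark viewports [0, viewportCount) as set by this command and clear their "trashed" bits.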
David Zhao Akeley44139b12021-04-26 16:16:13 -07005445 uint32_t bits = (1u << viewportCount) - 1u;
5446 cb_state->viewportWithCountMask |= bits;
5447 cb_state->trashedViewportMask &= ~bits;
Tobias Hector6663c9b2020-11-05 10:18:02 +00005448 cb_state->viewportWithCountCount = viewportCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005449 cb_state->trashedViewportCount = false;
Piers Daniell39842ee2020-07-10 16:42:33 -06005450 cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
5451 cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005452
5453 cb_state->dynamicViewports.resize(std::max(size_t(viewportCount), cb_state->dynamicViewports.size()));
5454 for (size_t i = 0; i < viewportCount; ++i) {
5455 cb_state->dynamicViewports[i] = pViewports[i];
5456 }
Piers Daniell39842ee2020-07-10 16:42:33 -06005457}
5458
5459void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
5460 const VkRect2D *pScissors) {
5461 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07005462 uint32_t bits = (1u << scissorCount) - 1u;
5463 cb_state->scissorWithCountMask |= bits;
5464 cb_state->trashedScissorMask &= ~bits;
5465 cb_state->scissorWithCountCount = scissorCount;
5466 cb_state->trashedScissorCount = false;
Piers Daniell39842ee2020-07-10 16:42:33 -06005467 cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
5468 cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
5469}
5470
void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                                   uint32_t bindingCount, const VkBuffer *pBuffers,
                                                                   const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
                                                                   const VkDeviceSize *pStrides) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (pStrides) {
        cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
        cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
    }

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
        vertex_buffer_binding.offset = pOffsets[i];
        vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
        vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
        // Add binding for this vertex buffer to this command buffer
        if (!disabled[command_buffer_state] && pBuffers[i]) {
            cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
                                                                         VkBool32 depthBoundsTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                             VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
                                                             VkCompareOp compareOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_OP_SET;
    cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
}

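// The discard-rectangle, sample-locations, and coarse-sample-order hooks only record that the
// corresponding dynamic state has been set on the command buffer; the parameter values themselves
// are not cached by this state tracker.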
void ValidationStateTracker::PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
                                                                    uint32_t discardRectangleCount,
                                                                    const VkRect2D *pDiscardRectangles) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DISCARD_RECTANGLE_SET;
    cb_state->static_status &= ~CBSTATUS_DISCARD_RECTANGLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
                                                                   const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_SAMPLE_LOCATIONS_SET;
    cb_state->static_status &= ~CBSTATUS_SAMPLE_LOCATIONS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer,
                                                                    VkCoarseSampleOrderTypeNV sampleOrderType,
                                                                    uint32_t customSampleOrderCount,
                                                                    const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_COARSE_SAMPLE_ORDER_SET;
    cb_state->static_status &= ~CBSTATUS_COARSE_SAMPLE_ORDER_SET;
}

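// Recorders for the VK_EXT_extended_dynamic_state2 commands (patch control points, logic op,
// rasterizer discard enable, depth bias enable, primitive restart enable) follow the same
// set-status / clear-static-status pattern as the setters above.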
void ValidationStateTracker::PreCallRecordCmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer, uint32_t patchControlPoints) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_PATCH_CONTROL_POINTS_SET;
    cb_state->static_status &= ~CBSTATUS_PATCH_CONTROL_POINTS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LOGIC_OP_SET;
    cb_state->static_status &= ~CBSTATUS_LOGIC_OP_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetRasterizerDiscardEnableEXT(VkCommandBuffer commandBuffer,
                                                                           VkBool32 rasterizerDiscardEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBiasEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BIAS_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetPrimitiveRestartEnableEXT(VkCommandBuffer commandBuffer,
                                                                          VkBool32 primitiveRestartEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
}

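// CmdSetVertexInputEXT supersedes both the vertex input state and the dynamic binding stride
// (VkVertexInputBindingDescription2EXT carries a stride member), so both status bits are set and
// cleared from static_status together.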
void ValidationStateTracker::PreCallRecordCmdSetVertexInputEXT(
    VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount,
    const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount,
    const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET;
    cb_state->static_status &= ~(CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET);
}

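// GetBufferDeviceAddress bookkeeping: the returned address is stored on the BUFFER_STATE and in
// buffer_address_map_ so that GPU-AV and ray tracing buffer validation can map a raw
// VkDeviceAddress back to the buffer it came from. The core (Vulkan 1.2), KHR, and EXT entry
// points all share the same recorder. Illustrative application-side sequence (not part of this
// file):
//   VkBufferDeviceAddressInfo info{VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, nullptr, buffer};
//   VkDeviceAddress addr = vkGetBufferDeviceAddress(device, &info);
// After this call returns, the tracker can resolve addr to the corresponding BUFFER_STATE.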
void ValidationStateTracker::RecordGetBufferDeviceAddress(const VkBufferDeviceAddressInfo *pInfo, VkDeviceAddress address) {
    BUFFER_STATE *buffer_state = GetBufferState(pInfo->buffer);
    if (buffer_state) {
        // address is used for GPU-AV and ray tracing buffer validation
        buffer_state->deviceAddress = address;
        buffer_address_map_.emplace(address, buffer_state);
    }
}

void ValidationStateTracker::PostCallRecordGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
                                                                  VkDeviceAddress address) {
    RecordGetBufferDeviceAddress(pInfo, address);
}

void ValidationStateTracker::PostCallRecordGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
                                                                     VkDeviceAddress address) {
    RecordGetBufferDeviceAddress(pInfo, address);
}

void ValidationStateTracker::PostCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
                                                                     VkDeviceAddress address) {
    RecordGetBufferDeviceAddress(pInfo, address);
}

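// Factory hook for swapchain state objects. Derived validation objects can override this to return
// a SWAPCHAIN_NODE subclass carrying extra per-layer data; the base tracker creates the plain node.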
std::shared_ptr<SWAPCHAIN_NODE> ValidationStateTracker::CreateSwapchainState(const VkSwapchainCreateInfoKHR *create_info,
                                                                             VkSwapchainKHR swapchain) {
    return std::make_shared<SWAPCHAIN_NODE>(create_info, swapchain);
}