/* Copyright (c) 2015-2021 The Khronos Group Inc.
 * Copyright (c) 2015-2021 Valve Corporation
 * Copyright (c) 2015-2021 LunarG, Inc.
 * Copyright (C) 2015-2021 Google Inc.
 * Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 * Author: Tobias Hector <tobias.hector@amd.com>
 */

#include <algorithm>
#include <cmath>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"
#include "sync_utils.h"
#include "cmd_buffer_state.h"
#include "render_pass_state.h"

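// Hook the device-level state tracker up to its instance-level counterpart so instance state
// (physical devices, etc.) stays reachable from device dispatch, then let the base class finish
// the usual validation-object initialization.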
void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}

// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
// attachments won't persist past the API entry point exit.
static std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
                                                                          const FRAMEBUFFER_STATE &fb_state) {
    const VkImageView *attachments = fb_state.createInfo.pAttachments;
    uint32_t count = fb_state.createInfo.attachmentCount;
    if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
        const auto *framebuffer_attachments = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
        if (framebuffer_attachments) {
            attachments = framebuffer_attachments->pAttachments;
            count = framebuffer_attachments->attachmentCount;
        }
    }
    return std::make_pair(count, attachments);
}

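// Resolve the attachments reported above into image-view state objects. The get_fn functor lets
// the caller pick the lookup/ownership style (e.g. shared_ptr); VK_NULL_HANDLE attachments are
// left as null entries in the returned vector.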
template <typename ImageViewPointer, typename Get>
std::vector<ImageViewPointer> GetAttachmentViewsImpl(const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state,
                                                     const Get &get_fn) {
    std::vector<ImageViewPointer> views;

    const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
    const auto attachment_count = count_attachment.first;
    const auto *attachments = count_attachment.second;
    views.resize(attachment_count, nullptr);
    for (uint32_t i = 0; i < attachment_count; i++) {
        if (attachments[i] != VK_NULL_HANDLE) {
            views[i] = get_fn(attachments[i]);
        }
    }
    return views;
}

std::vector<std::shared_ptr<const IMAGE_VIEW_STATE>> ValidationStateTracker::GetSharedAttachmentViews(
    const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state) const {
    auto get_fn = [this](VkImageView handle) { return this->GetShared<IMAGE_VIEW_STATE>(handle); };
    return GetAttachmentViewsImpl<std::shared_ptr<const IMAGE_VIEW_STATE>>(rp_begin, fb_state, get_fn);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

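// Look up the format features cached for this AHB external format when
// vkGetAndroidHardwareBufferPropertiesANDROID was recorded; returns 0 if the external format is
// not in the map (VUID 01894 reports that case during validation).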
template <typename CreateInfo>
VkFormatFeatureFlags ValidationStateTracker::GetExternalFormatFeaturesANDROID(const CreateInfo *create_info) const {
    VkFormatFeatureFlags format_features = 0;
    const VkExternalFormatANDROID *ext_fmt_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            format_features = it->second;
        }
    }
    return format_features;
}

void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto ahb_format_props = LvlFindInChain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
    if (ahb_format_props) {
        ahb_ext_formats_map.emplace(ahb_format_props->externalFormat, ahb_format_props->formatFeatures);
    }
}

#else

template <typename CreateInfo>
VkFormatFeatureFlags ValidationStateTracker::GetExternalFormatFeaturesANDROID(const CreateInfo *create_info) const {
    return 0;
}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

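// Query the format features that apply to one specific image. For DRM format modifier tiling the
// features depend on the modifier actually chosen for the image, so the image's modifier is matched
// against the list reported by the physical device; otherwise the linear/optimal tiling features
// from vkGetPhysicalDeviceFormatProperties are used.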
VkFormatFeatureFlags GetImageFormatFeatures(VkPhysicalDevice physical_device, VkDevice device, VkImage image, VkFormat format,
                                            VkImageTiling tiling) {
    VkFormatFeatureFlags format_features = 0;
    // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
    // if format is AHB external format then the features are already set
    if (tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                       nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image, &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
        std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
        drm_properties.resize(drm_properties_list.drmFormatModifierCount);
        drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);

        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
                format_features = drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                break;
            }
        }
    } else {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
        format_features =
            (tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures : format_properties.optimalTilingFeatures;
    }
    return format_features;
}

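// Create the IMAGE_STATE node for a successfully created image. Format features come from the
// cached AHB external-format map when that extension is in use, otherwise from the query above,
// and memory requirements are captured up front (per plane for disjoint multi-planar images) so
// later binding validation does not need to re-query them.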
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkFormatFeatureFlags format_features = 0;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        format_features = GetExternalFormatFeaturesANDROID(pCreateInfo);
    }
    if (format_features == 0) {
        format_features = GetImageFormatFeatures(physical_device, device, *pImage, pCreateInfo->format, pCreateInfo->tiling);
    }

    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo, format_features);
    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->IsExternalAHB() == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements[0]);
        } else {
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least a first plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->requirements[0] = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->requirements[1] = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->requirements[2] = mem_reqs2.memoryRequirements;
            }
        }
    }

    imageMap[*pImage] = std::move(is_node);
}

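// Tear down tracking for a destroyed image: Destroy() invalidates anything still referencing the
// state node, and the node is then dropped from imageMap.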
void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    if (!image_state) return;

    image_state->Destroy();
    imageMap.erase(image);
}

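// The PreCallRecordCmd* transfer/clear hooks below all follow the same pattern: unless
// command-buffer state tracking is disabled, look up the command buffer and the source/destination
// resource state and record parent/child links, so destroying a resource later invalidates the
// command buffers that recorded it.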
void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        cb_node->AddChild(image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        cb_node->AddChild(image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
                                                           const VkCopyImageInfo2KHR *pCopyImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pCopyImageInfo->srcImage);
    auto dst_image_state = GetImageState(pCopyImageInfo->dstImage);

    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
                                                              const VkResolveImageInfo2KHR *pResolveImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pResolveImageInfo->srcImage);
    auto dst_image_state = GetImageState(pResolveImageInfo->dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
                                                           const VkBlitImageInfo2KHR *pBlitImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pBlitImageInfo->srcImage);
    auto dst_image_state = GetImageState(pBlitImageInfo->dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

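// Create the BUFFER_STATE node for a new buffer, remember any opaque capture address (used by
// GPU-AV and ray tracing buffer validation), and cache the memory requirements so later binding
// checks do not need to query them again.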
void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;

    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    if (pCreateInfo) {
        const auto *opaque_capture_address = LvlFindInChain<VkBufferOpaqueCaptureAddressCreateInfo>(pCreateInfo->pNext);
        if (opaque_capture_address) {
            // address is used for GPU-AV and ray tracing buffer validation
            buffer_state->deviceAddress = opaque_capture_address->opaqueCaptureAddress;
            buffer_address_map_.emplace(opaque_capture_address->opaqueCaptureAddress, buffer_state.get());
        }
    }

    // Get the memory requirements now, in case the app never queries them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.emplace(*pBuffer, std::move(buffer_state));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;

    auto buffer_state = GetBufferShared(pCreateInfo->buffer);

    VkFormatProperties format_properties;
    DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);

    bufferViewMap[*pView] =
        std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo, format_properties.bufferFeatures);
}

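// Create the IMAGE_VIEW_STATE node. The view inherits the image's format features for AHB-backed
// images; otherwise they are queried per the image's tiling. Cubic-filter properties are queried
// here (when VK_EXT_filter_cubic is enabled) because doing so during CmdDraw validation would be
// too costly.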
void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);

    VkFormatFeatureFlags format_features = 0;
    if (image_state->HasAHBFormat() == true) {
        // The ImageView uses the same format features as its Image since they share the same AHB
        format_features = image_state->format_features;
    } else {
        format_features = GetImageFormatFeatures(physical_device, device, image_state->image(), pCreateInfo->format,
                                                 image_state->createInfo.tiling);
    }

    // filter_cubic_props is used in CmdDraw validation, but querying it there is too expensive, so cache it now.
    auto filter_cubic_props = LvlInitStruct<VkFilterCubicImageViewImageFormatPropertiesEXT>();
    if (IsExtEnabled(device_extensions.vk_ext_filter_cubic)) {
        auto imageview_format_info = LvlInitStruct<VkPhysicalDeviceImageViewImageFormatInfoEXT>();
        imageview_format_info.imageViewType = pCreateInfo->viewType;
        auto image_format_info = LvlInitStruct<VkPhysicalDeviceImageFormatInfo2>(&imageview_format_info);
        image_format_info.type = image_state->createInfo.imageType;
        image_format_info.format = image_state->createInfo.format;
        image_format_info.tiling = image_state->createInfo.tiling;
        auto usage_create_info = LvlFindInChain<VkImageViewUsageCreateInfo>(pCreateInfo->pNext);
        image_format_info.usage = usage_create_info ? usage_create_info->usage : image_state->createInfo.usage;
        image_format_info.flags = image_state->createInfo.flags;

        auto image_format_properties = LvlInitStruct<VkImageFormatProperties2>(&filter_cubic_props);

        DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &image_format_info, &image_format_properties);
    }

    imageViewMap[*pView] =
        std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo, format_features, filter_cubic_props);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
                                                            const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(pCopyBufferInfos->srcBuffer);
    auto dst_buffer_state = GetBufferState(pCopyBufferInfos->dstBuffer);

    // Update bindings between buffers and cmd buffer
    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;

    // Any bound cmd buffers are now invalid
    image_view_state->Destroy();
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);

    buffer_state->Destroy();
    bufferMap.erase(buffer_state->buffer());
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);

    // Any bound cmd buffers are now invalid
    buffer_view_state->Destroy();
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    cb_node->AddChild(buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
                                                                   const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pCopyImageToBufferInfo->srcImage);
    auto dst_buffer_state = GetBufferState(pCopyImageToBufferInfo->dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
                                                                   const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(pCopyBufferToImageInfo->srcBuffer);
    auto dst_image_state = GetImageState(pCopyBufferToImageInfo->dstImage);

    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_image_state);
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureStateNV(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

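// Called from draw/dispatch record-time hooks: bind the pipeline's active descriptor sets (and the
// resources they reference) to the command buffer, skipping sets whose contents, image layouts, and
// binding requirements have not changed since they were last validated. This keeps the
// "bindless"/many-descriptor case cheap by only recording the delta in bindings.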
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
                                             const char *function) {
    const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
    auto &state = cb_state->lastBound[lv_bind_point];
    PIPELINE_STATE *pipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pipe->active_slots) {
            uint32_t set_index = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[set_index].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing it with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[set_index].validated_set != descriptor_set ||
                state.per_set[set_index].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[set_index].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[set_index].validated_set_binding_req_map.begin(),
                                              state.per_set[set_index].validated_set_binding_req_map.end(),
                                              binding_req_map.begin(), binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[set_index].validated_set_binding_req_map.begin(),
                                        state.per_set[set_index].validated_set_binding_req_map.end(),
                                        layer_data::insert_iterator<BindingReqMap>(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, delta_reqs, function);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, binding_req_map, function);
                }

                state.per_set[set_index].validated_set = descriptor_set;
                state.per_set[set_index].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[set_index].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[set_index].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[set_index].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[set_index].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    if (!pipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    // Any bound cmd buffers are now invalid
    descriptor_set->Destroy();

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in a mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto *ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureStateNV(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureKHR: {
            base_ptr = GetAccelerationStructureStateKHR(object_struct.Cast<VkAccelerationStructureKHR>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Gets the union of all features defined by Potential Format Features,
// except it does not handle the AHB external format case, as that can only be used for sampled images
VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
    VkFormatFeatureFlags format_features = 0;

    if (format != VK_FORMAT_UNDEFINED) {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
        format_features |= format_properties.linearTilingFeatures;
        format_features |= format_properties.optimalTilingFeatures;
        if (device_extensions.vk_ext_image_drm_format_modifier) {
            // VK_KHR_get_physical_device_properties2 is required in this case
            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;

            // First call is to get the number of modifiers compatible with the queried format
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);

            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
            drm_properties.resize(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();

            // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
            // compatible with the queried format
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);

            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
            }
        }
    }

    return format_features;
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *cb_state = GetCBState(cb);
    if (cb_state) {
        cb_state->Reset();
        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
    }

    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

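// Capture device-level state once vkCreateDevice succeeds: record which features were actually
// enabled (folding VkPhysicalDeviceVulkan12Features or its pre-promotion extension structs into
// enabled_features.core12), cache the queue family properties, and stash a pointer back to this
// device's physical-device state.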
void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = LvlFindInChain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);

            const auto *provoking_vertex_features = lvl_find_in_chain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(features2->pNext);
            if (provoking_vertex_features) {
                state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
            }
        }
    }

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously set them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *vulkan_12_features = LvlFindInChain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
    if (vulkan_12_features) {
        state_tracker->enabled_features.core12 = *vulkan_12_features;
    } else {
        // Set Extension Feature Aliases to false as there is no struct to check
        state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
        state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
        state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
        state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
        state_tracker->enabled_features.core12.subgroupBroadcastDynamicId = VK_FALSE;

        // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features

        const auto *eight_bit_storage_features = LvlFindInChain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
        if (eight_bit_storage_features) {
            state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
            state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
                eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
            state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
        }

        const auto *float16_int8_features = LvlFindInChain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
        if (float16_int8_features) {
            state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
            state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
        }

        const auto *descriptor_indexing_features = LvlFindInChain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
        if (descriptor_indexing_features) {
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
                descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
            state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
                descriptor_indexing_features->descriptorBindingPartiallyBound;
            state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
                descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
            state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
        }

        const auto *scalar_block_layout_features = LvlFindInChain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
        if (scalar_block_layout_features) {
            state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
        }

        const auto *imageless_framebuffer_features =
            LvlFindInChain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
        if (imageless_framebuffer_features) {
            state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
        }

        const auto *uniform_buffer_standard_layout_features =
            LvlFindInChain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
        if (uniform_buffer_standard_layout_features) {
            state_tracker->enabled_features.core12.uniformBufferStandardLayout =
                uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
        }

        const auto *subgroup_extended_types_features =
            LvlFindInChain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
        if (subgroup_extended_types_features) {
            state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
                subgroup_extended_types_features->shaderSubgroupExtendedTypes;
        }

        const auto *separate_depth_stencil_layouts_features =
            LvlFindInChain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
        if (separate_depth_stencil_layouts_features) {
            state_tracker->enabled_features.core12.separateDepthStencilLayouts =
                separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
        }

        const auto *host_query_reset_features = LvlFindInChain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
        if (host_query_reset_features) {
            state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
        }

        const auto *timeline_semaphore_features = LvlFindInChain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
        if (timeline_semaphore_features) {
            state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
        }

        const auto *buffer_device_address = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
        if (buffer_device_address) {
            state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
            state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
                buffer_device_address->bufferDeviceAddressCaptureReplay;
            state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
                buffer_device_address->bufferDeviceAddressMultiDevice;
        }

        const auto *atomic_int64_features = LvlFindInChain<VkPhysicalDeviceShaderAtomicInt64Features>(pCreateInfo->pNext);
        if (atomic_int64_features) {
            state_tracker->enabled_features.core12.shaderBufferInt64Atomics = atomic_int64_features->shaderBufferInt64Atomics;
            state_tracker->enabled_features.core12.shaderSharedInt64Atomics = atomic_int64_features->shaderSharedInt64Atomics;
        }

        const auto *memory_model_features = LvlFindInChain<VkPhysicalDeviceVulkanMemoryModelFeatures>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -0800975 if (memory_model_features) {
976 state_tracker->enabled_features.core12.vulkanMemoryModel = memory_model_features->vulkanMemoryModel;
977 state_tracker->enabled_features.core12.vulkanMemoryModelDeviceScope =
978 memory_model_features->vulkanMemoryModelDeviceScope;
979 state_tracker->enabled_features.core12.vulkanMemoryModelAvailabilityVisibilityChains =
980 memory_model_features->vulkanMemoryModelAvailabilityVisibilityChains;
981 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700982 }
983
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700984 const auto *vulkan_11_features = LvlFindInChain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700985 if (vulkan_11_features) {
986 state_tracker->enabled_features.core11 = *vulkan_11_features;
987 } else {
988         // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan11Features
989
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700990 const auto *sixteen_bit_storage_features = LvlFindInChain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700991 if (sixteen_bit_storage_features) {
992 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
993 sixteen_bit_storage_features->storageBuffer16BitAccess;
994 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
995 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
996 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
997 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
998 }
999
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001000 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001001 if (multiview_features) {
1002 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1003 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1004 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1005 }
1006
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001007 const auto *variable_pointers_features = LvlFindInChain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001008 if (variable_pointers_features) {
1009 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1010 variable_pointers_features->variablePointersStorageBuffer;
1011 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1012 }
1013
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001014 const auto *protected_memory_features = LvlFindInChain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001015 if (protected_memory_features) {
1016 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1017 }
1018
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001019 const auto *ycbcr_conversion_features = LvlFindInChain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001020 if (ycbcr_conversion_features) {
1021 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1022 }
1023
1024 const auto *shader_draw_parameters_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001025 LvlFindInChain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001026 if (shader_draw_parameters_features) {
1027 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001028 }
1029 }
1030
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001031 const auto *device_group_ci = LvlFindInChain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001032 if (device_group_ci) {
1033 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1034 state_tracker->device_group_create_info = *device_group_ci;
1035 } else {
1036 state_tracker->physical_device_count = 1;
1037 }
locke-lunargd556cc32019-09-17 01:21:23 -06001038
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001039 const auto *exclusive_scissor_features = LvlFindInChain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001040 if (exclusive_scissor_features) {
1041 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1042 }
1043
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001044 const auto *shading_rate_image_features = LvlFindInChain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001045 if (shading_rate_image_features) {
1046 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1047 }
1048
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001049 const auto *mesh_shader_features = LvlFindInChain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001050 if (mesh_shader_features) {
1051 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1052 }
1053
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001054 const auto *inline_uniform_block_features = LvlFindInChain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001055 if (inline_uniform_block_features) {
1056 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1057 }
1058
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001059 const auto *transform_feedback_features = LvlFindInChain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001060 if (transform_feedback_features) {
1061 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1062 }
1063
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001064 const auto *vtx_attrib_div_features = LvlFindInChain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001065 if (vtx_attrib_div_features) {
1066 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1067 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001068
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001069 const auto *buffer_device_address_ext = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001070 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001071 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001072 }
1073
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001074 const auto *cooperative_matrix_features = LvlFindInChain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001075 if (cooperative_matrix_features) {
1076 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1077 }
1078
locke-lunargd556cc32019-09-17 01:21:23 -06001079 const auto *compute_shader_derivatives_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001080 LvlFindInChain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001081 if (compute_shader_derivatives_features) {
1082 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1083 }
1084
1085 const auto *fragment_shader_barycentric_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001086 LvlFindInChain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001087 if (fragment_shader_barycentric_features) {
1088 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1089 }
1090
1091 const auto *shader_image_footprint_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001092 LvlFindInChain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001093 if (shader_image_footprint_features) {
1094 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1095 }
1096
1097 const auto *fragment_shader_interlock_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001098 LvlFindInChain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001099 if (fragment_shader_interlock_features) {
1100 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1101 }
1102
1103 const auto *demote_to_helper_invocation_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001104 LvlFindInChain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001105 if (demote_to_helper_invocation_features) {
1106 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1107 }
1108
1109 const auto *texel_buffer_alignment_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001110 LvlFindInChain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001111 if (texel_buffer_alignment_features) {
1112 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1113 }
1114
locke-lunargd556cc32019-09-17 01:21:23 -06001115 const auto *pipeline_exe_props_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001116 LvlFindInChain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001117 if (pipeline_exe_props_features) {
1118 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1119 }
1120
Jeff Bolz82f854d2019-09-17 14:56:47 -05001121 const auto *dedicated_allocation_image_aliasing_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001122 LvlFindInChain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
Jeff Bolz82f854d2019-09-17 14:56:47 -05001123 if (dedicated_allocation_image_aliasing_features) {
1124 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1125 *dedicated_allocation_image_aliasing_features;
1126 }
1127
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001128 const auto *performance_query_features = LvlFindInChain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001129 if (performance_query_features) {
1130 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1131 }
1132
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001133 const auto *device_coherent_memory_features = LvlFindInChain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
Tobias Hector782bcde2019-11-28 16:19:42 +00001134 if (device_coherent_memory_features) {
1135 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1136 }
1137
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001138 const auto *ycbcr_image_array_features = LvlFindInChain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungcead0802020-01-30 22:20:10 -08001139 if (ycbcr_image_array_features) {
1140 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1141 }
1142
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001143 const auto *ray_query_features = LvlFindInChain<VkPhysicalDeviceRayQueryFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001144 if (ray_query_features) {
1145 state_tracker->enabled_features.ray_query_features = *ray_query_features;
1146 }
1147
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001148 const auto *ray_tracing_pipeline_features = LvlFindInChain<VkPhysicalDeviceRayTracingPipelineFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001149 if (ray_tracing_pipeline_features) {
1150 state_tracker->enabled_features.ray_tracing_pipeline_features = *ray_tracing_pipeline_features;
1151 }
1152
1153 const auto *ray_tracing_acceleration_structure_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001154 LvlFindInChain<VkPhysicalDeviceAccelerationStructureFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001155 if (ray_tracing_acceleration_structure_features) {
1156 state_tracker->enabled_features.ray_tracing_acceleration_structure_features = *ray_tracing_acceleration_structure_features;
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001157 }
1158
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001159 const auto *robustness2_features = LvlFindInChain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz165818a2020-05-08 11:19:03 -05001160 if (robustness2_features) {
1161 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1162 }
1163
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001164 const auto *fragment_density_map_features = LvlFindInChain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001165 if (fragment_density_map_features) {
1166 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1167 }
1168
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001169 const auto *fragment_density_map_features2 = LvlFindInChain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001170 if (fragment_density_map_features2) {
1171 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1172 }
1173
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001174 const auto *astc_decode_features = LvlFindInChain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001175 if (astc_decode_features) {
1176 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1177 }
1178
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001179 const auto *custom_border_color_features = LvlFindInChain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
Tony-LunarG7337b312020-04-15 16:40:25 -06001180 if (custom_border_color_features) {
1181 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1182 }
1183
sfricke-samsungfd661d62020-05-16 00:57:27 -07001184 const auto *pipeline_creation_cache_control_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001185 LvlFindInChain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungfd661d62020-05-16 00:57:27 -07001186 if (pipeline_creation_cache_control_features) {
1187 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1188 }
1189
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001190 const auto *fragment_shading_rate_features = LvlFindInChain<VkPhysicalDeviceFragmentShadingRateFeaturesKHR>(pCreateInfo->pNext);
Tobias Hector6663c9b2020-11-05 10:18:02 +00001191 if (fragment_shading_rate_features) {
1192 state_tracker->enabled_features.fragment_shading_rate_features = *fragment_shading_rate_features;
1193 }
1194
Piers Daniell39842ee2020-07-10 16:42:33 -06001195 const auto *extended_dynamic_state_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001196 LvlFindInChain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
Piers Daniell39842ee2020-07-10 16:42:33 -06001197 if (extended_dynamic_state_features) {
1198 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1199 }
1200
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -07001201 const auto *extended_dynamic_state2_features =
1202 LvlFindInChain<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT>(pCreateInfo->pNext);
1203 if (extended_dynamic_state2_features) {
1204 state_tracker->enabled_features.extended_dynamic_state2_features = *extended_dynamic_state2_features;
1205 }
1206
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001207 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001208 if (multiview_features) {
1209 state_tracker->enabled_features.multiview_features = *multiview_features;
1210 }
1211
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001212 const auto *portability_features = LvlFindInChain<VkPhysicalDevicePortabilitySubsetFeaturesKHR>(pCreateInfo->pNext);
Nathaniel Cesariob3f2d702020-11-09 09:20:49 -07001213 if (portability_features) {
1214 state_tracker->enabled_features.portability_subset_features = *portability_features;
1215 }
1216
sfricke-samsung0065ce02020-12-03 22:46:37 -08001217 const auto *shader_integer_functions2_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001218 LvlFindInChain<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001219 if (shader_integer_functions2_features) {
1220 state_tracker->enabled_features.shader_integer_functions2_features = *shader_integer_functions2_features;
1221 }
1222
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001223 const auto *shader_sm_builtins_feature = LvlFindInChain<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001224 if (shader_sm_builtins_feature) {
1225 state_tracker->enabled_features.shader_sm_builtins_feature = *shader_sm_builtins_feature;
1226 }
1227
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001228 const auto *shader_atomic_float_feature = LvlFindInChain<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001229 if (shader_atomic_float_feature) {
1230 state_tracker->enabled_features.shader_atomic_float_feature = *shader_atomic_float_feature;
1231 }
1232
1233 const auto *shader_image_atomic_int64_feature =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001234 LvlFindInChain<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001235 if (shader_image_atomic_int64_feature) {
1236 state_tracker->enabled_features.shader_image_atomic_int64_feature = *shader_image_atomic_int64_feature;
1237 }
1238
sfricke-samsung486a51e2021-01-02 00:10:15 -08001239 const auto *shader_clock_feature = LvlFindInChain<VkPhysicalDeviceShaderClockFeaturesKHR>(pCreateInfo->pNext);
1240 if (shader_clock_feature) {
1241 state_tracker->enabled_features.shader_clock_feature = *shader_clock_feature;
1242 }
1243
Jeremy Gebben5f585ae2021-02-02 09:03:06 -07001244 const auto *conditional_rendering_features =
1245 LvlFindInChain<VkPhysicalDeviceConditionalRenderingFeaturesEXT>(pCreateInfo->pNext);
1246 if (conditional_rendering_features) {
1247 state_tracker->enabled_features.conditional_rendering = *conditional_rendering_features;
1248 }
1249
Shannon McPhersondb287d42021-02-02 15:27:32 -07001250 const auto *workgroup_memory_explicit_layout_features =
1251 LvlFindInChain<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>(pCreateInfo->pNext);
1252 if (workgroup_memory_explicit_layout_features) {
1253 state_tracker->enabled_features.workgroup_memory_explicit_layout_features = *workgroup_memory_explicit_layout_features;
1254 }
1255
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001256 const auto *synchronization2_features =
1257 LvlFindInChain<VkPhysicalDeviceSynchronization2FeaturesKHR>(pCreateInfo->pNext);
1258 if (synchronization2_features) {
1259 state_tracker->enabled_features.synchronization2_features = *synchronization2_features;
1260 }
1261
Locke Linf3873542021-04-26 11:25:10 -06001262     const auto *provoking_vertex_features = LvlFindInChain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(pCreateInfo->pNext);
1263 if (provoking_vertex_features) {
1264 state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
1265 }
1266
Piers Daniellcb6d8032021-04-19 18:51:26 -06001267 const auto *vertex_input_dynamic_state_features =
1268 LvlFindInChain<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1269 if (vertex_input_dynamic_state_features) {
1270 state_tracker->enabled_features.vertex_input_dynamic_state_features = *vertex_input_dynamic_state_features;
1271 }
1272
David Zhao Akeley44139b12021-04-26 16:16:13 -07001273 const auto *inherited_viewport_scissor_features =
1274 LvlFindInChain<VkPhysicalDeviceInheritedViewportScissorFeaturesNV>(pCreateInfo->pNext);
1275 if (inherited_viewport_scissor_features) {
1276 state_tracker->enabled_features.inherited_viewport_scissor_features = *inherited_viewport_scissor_features;
1277 }
1278
Tony-LunarG4490de42021-06-21 15:49:19 -06001279 const auto *multi_draw_features = LvlFindInChain<VkPhysicalDeviceMultiDrawFeaturesEXT>(pCreateInfo->pNext);
1280 if (multi_draw_features) {
1281 state_tracker->enabled_features.multi_draw_features = *multi_draw_features;
1282 }
1283
ziga-lunarg29ba2b92021-07-20 21:51:45 +02001284 const auto *color_write_features = LvlFindInChain<VkPhysicalDeviceColorWriteEnableFeaturesEXT>(pCreateInfo->pNext);
1285 if (color_write_features) {
1286 state_tracker->enabled_features.color_write_features = *color_write_features;
1287 }
1288
Mike Schuchardtb3870ea2021-07-20 18:56:51 -07001289 const auto *shader_atomic_float2_features = LvlFindInChain<VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT>(pCreateInfo->pNext);
1290 if (shader_atomic_float2_features) {
1291 state_tracker->enabled_features.shader_atomic_float2_features = *shader_atomic_float2_features;
1292 }
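    // The feature queries above mirror whatever structs the application chained into
    // VkDeviceCreateInfo::pNext. A minimal, illustrative application-side sketch (hypothetical
    // handles, queue setup and error handling omitted; not part of the layer itself):
    //
    //     VkPhysicalDeviceTimelineSemaphoreFeatures timeline_features{
    //         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES};
    //     timeline_features.timelineSemaphore = VK_TRUE;
    //     VkPhysicalDeviceMultiDrawFeaturesEXT multi_draw_features{
    //         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT, &timeline_features};
    //     multi_draw_features.multiDraw = VK_TRUE;
    //     VkDeviceCreateInfo create_info{VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, &multi_draw_features};
    //     vkCreateDevice(gpu, &create_info, nullptr, &device);
    //
    // Each LvlFindInChain call above walks that chain and, when the struct is present, copies the
    // enabled booleans into state_tracker->enabled_features for later validation.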
1293
locke-lunargd556cc32019-09-17 01:21:23 -06001294     // Store physical device properties and physical device memory limits into the state tracker
1295 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1296 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001297 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1298 &state_tracker->phys_dev_props_core11);
1299 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1300 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001301
1302 const auto &dev_ext = state_tracker->device_extensions;
1303 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1304
1305 if (dev_ext.vk_khr_push_descriptor) {
1306 // Get the needed push_descriptor limits
1307 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1308 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1309 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1310 }
1311
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001312 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001313 VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing_prop;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001314 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1315 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1316 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1317 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1318 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1319 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1320 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1321 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1322 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1323 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1324 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1325 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1326 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1327 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1328 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1329 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1330 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1331 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1332 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1333 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1334 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1335 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1336 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1337 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1338 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1339 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1340 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1341 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1342 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1343 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1344 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1345 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1346 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1347 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1348 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1349 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1350 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1351 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1352 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1353 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
1354 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
1355 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
1356 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
1357 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
1358 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
1359 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
1360 }
1361
locke-lunargd556cc32019-09-17 01:21:23 -06001362 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1363 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1364 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1365 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001366
1367 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001368 VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001369 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
1370 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
1371 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
1372 depth_stencil_resolve_props.supportedStencilResolveModes;
1373 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
1374 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
1375 }
1376
locke-lunargd556cc32019-09-17 01:21:23 -06001377 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001378 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
sourav parmarcd5fb182020-07-17 12:58:44 -07001379 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing_pipeline, &phys_dev_props->ray_tracing_propsKHR);
1380 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_acceleration_structure, &phys_dev_props->acc_structure_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001381 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1382 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
Mike Schuchardtc57de4a2021-07-20 17:26:32 -07001383 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001384 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07001385 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06001386 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001387 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_multiview, &phys_dev_props->multiview_props);
Nathaniel Cesario3291c912020-11-17 16:54:41 -07001388 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_portability_subset, &phys_dev_props->portability_props);
Locke Lin016d8482021-05-27 12:11:31 -06001389 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_provoking_vertex, &phys_dev_props->provoking_vertex_props);
Tony-LunarG4490de42021-06-21 15:49:19 -06001390 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_multi_draw, &phys_dev_props->multi_draw_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001391
1392 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001393 VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001394 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
1395 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
1396 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
1397 }
1398
1399 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001400 VkPhysicalDeviceFloatControlsProperties float_controls_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001401 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
1402 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
1403 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
1404 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
1405 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
1406 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
1407 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
1408 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
1409 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
1410 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
1411 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
1412 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
1413 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
1414 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
1415 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
1416 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
1417 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
1418 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
1419 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
1420 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
1421 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
1422 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001423
locke-lunargd556cc32019-09-17 01:21:23 -06001424 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1425 // Get the needed cooperative_matrix properties
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001426 auto cooperative_matrix_props = LvlInitStruct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1427 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&cooperative_matrix_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001428 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1429 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1430
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001431 uint32_t num_cooperative_matrix_properties = 0;
1432 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties, NULL);
1433 state_tracker->cooperative_matrix_properties.resize(num_cooperative_matrix_properties,
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001434 LvlInitStruct<VkCooperativeMatrixPropertiesNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06001435
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001436 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties,
locke-lunargd556cc32019-09-17 01:21:23 -06001437 state_tracker->cooperative_matrix_properties.data());
1438 }
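    // The cooperative matrix block above uses the standard Vulkan "two call" enumeration pattern:
    // query the count, size the storage, then query the data. An equivalent application-side
    // sketch, for illustration only (error handling omitted):
    //
    //     uint32_t count = 0;
    //     vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &count, nullptr);
    //     std::vector<VkCooperativeMatrixPropertiesNV> props(
    //         count, {VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV});
    //     vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &count, props.data());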
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001439 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06001440 // Get the needed subgroup limits
Locke Lin016d8482021-05-27 12:11:31 -06001441 auto subgroup_prop = LvlInitStruct<VkPhysicalDeviceSubgroupProperties>();
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001442 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&subgroup_prop);
locke-lunargd556cc32019-09-17 01:21:23 -06001443 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1444
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001445 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
1446 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
1447 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
1448 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06001449 }
1450
Tobias Hector6663c9b2020-11-05 10:18:02 +00001451 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_fragment_shading_rate, &phys_dev_props->fragment_shading_rate_props);
1452
locke-lunargd556cc32019-09-17 01:21:23 -06001453 // Store queue family data
1454 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1455 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001456 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
sfricke-samsungb585ec12021-05-06 03:10:13 -07001457 state_tracker->queue_family_index_set.insert(queue_create_info.queueFamilyIndex);
1458 state_tracker->device_queue_info_list.push_back(
1459 {i, queue_create_info.queueFamilyIndex, queue_create_info.flags, queue_create_info.queueCount});
locke-lunargd556cc32019-09-17 01:21:23 -06001460 }
1461 }
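    // Worked example of what the loop above records (illustrative values only): a single
    // VkDeviceQueueCreateInfo with queueFamilyIndex = 0 and queueCount = 2 adds family 0 to
    // queue_family_index_set and appends the entry {create-info index 0, family 0, flags 0, count 2}
    // to device_queue_info_list.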
1462}
1463
1464void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
1465 if (!device) return;
1466
locke-lunargd556cc32019-09-17 01:21:23 -06001467 // Reset all command buffers before destroying them, to unlink object_bindings.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001468 for (auto &command_buffer : commandBufferMap) {
1469 ResetCommandBufferState(command_buffer.first);
locke-lunargd556cc32019-09-17 01:21:23 -06001470 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001471 pipelineMap.clear();
1472 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001473 commandBufferMap.clear();
1474
1475 // This will also delete all sets in the pool & remove them from setMap
1476 DeleteDescriptorSetPools();
1477 // All sets should be removed
1478 assert(setMap.empty());
1479 descriptorSetLayoutMap.clear();
1480 imageViewMap.clear();
1481 imageMap.clear();
1482 bufferViewMap.clear();
1483 bufferMap.clear();
1484 // Queues persist until device is destroyed
1485 queueMap.clear();
1486}
1487
locke-lunargd556cc32019-09-17 01:21:23 -06001488// Track which resources are in-flight by atomically incrementing their "in_use" count
1489void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1490 cb_node->submitCount++;
locke-lunargd556cc32019-09-17 01:21:23 -06001491
locke-lunargd556cc32019-09-17 01:21:23 -06001492 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
1493 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
1494 // should then be flagged prior to calling this function
1495 for (auto event : cb_node->writeEventsBeforeWait) {
1496 auto event_state = GetEventState(event);
1497 if (event_state) event_state->write_in_use++;
1498 }
1499}
1500
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001501void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
Jeremy Gebbencbf22862021-03-03 12:01:22 -07001502 layer_data::unordered_map<VkQueue, uint64_t> other_queue_seqs;
1503 layer_data::unordered_map<VkSemaphore, uint64_t> timeline_semaphore_counters;
locke-lunargd556cc32019-09-17 01:21:23 -06001504
1505 // Roll this queue forward, one submission at a time.
1506 while (pQueue->seq < seq) {
1507 auto &submission = pQueue->submissions.front();
1508
1509 for (auto &wait : submission.waitSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001510 auto semaphore_state = GetSemaphoreState(wait.semaphore);
1511 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001512 semaphore_state->EndUse();
locke-lunargd556cc32019-09-17 01:21:23 -06001513 }
Mike Schuchardt2df08912020-12-15 16:28:09 -08001514 if (wait.type == VK_SEMAPHORE_TYPE_TIMELINE) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001515 auto &last_counter = timeline_semaphore_counters[wait.semaphore];
1516 last_counter = std::max(last_counter, wait.payload);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001517 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001518 auto &last_seq = other_queue_seqs[wait.queue];
1519 last_seq = std::max(last_seq, wait.seq);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001520 }
locke-lunargd556cc32019-09-17 01:21:23 -06001521 }
1522
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001523 for (auto &signal : submission.signalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001524 auto semaphore_state = GetSemaphoreState(signal.semaphore);
1525 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001526 semaphore_state->EndUse();
Mike Schuchardt2df08912020-12-15 16:28:09 -08001527 if (semaphore_state->type == VK_SEMAPHORE_TYPE_TIMELINE && semaphore_state->payload < signal.payload) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001528 semaphore_state->payload = signal.payload;
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001529 }
locke-lunargd556cc32019-09-17 01:21:23 -06001530 }
1531 }
1532
1533 for (auto &semaphore : submission.externalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001534 auto semaphore_state = GetSemaphoreState(semaphore);
1535 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001536 semaphore_state->EndUse();
locke-lunargd556cc32019-09-17 01:21:23 -06001537 }
1538 }
1539
1540 for (auto cb : submission.cbs) {
1541 auto cb_node = GetCBState(cb);
1542 if (!cb_node) {
1543 continue;
1544 }
1545 // First perform decrement on general case bound objects
locke-lunargd556cc32019-09-17 01:21:23 -06001546 for (auto event : cb_node->writeEventsBeforeWait) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001547 auto event_node = eventMap.find(event);
1548 if (event_node != eventMap.end()) {
John Zulauf48057322020-12-02 11:59:31 -07001549 event_node->second->write_in_use--;
locke-lunargd556cc32019-09-17 01:21:23 -06001550 }
1551 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001552 QueryMap local_query_to_state_map;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001553 VkQueryPool first_pool = VK_NULL_HANDLE;
Jeff Bolz310775c2019-10-09 00:46:33 -05001554 for (auto &function : cb_node->queryUpdates) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001555 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
Jeff Bolz310775c2019-10-09 00:46:33 -05001556 }
1557
John Zulauf79f06582021-02-27 18:38:39 -07001558 for (const auto &query_state_pair : local_query_to_state_map) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001559 if (query_state_pair.second == QUERYSTATE_ENDED) {
1560 queryToStateMap[query_state_pair.first] = QUERYSTATE_AVAILABLE;
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001561 }
locke-lunargd556cc32019-09-17 01:21:23 -06001562 }
Jeremy Gebben5570abe2021-05-16 18:35:13 -06001563 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
1564 cb_node->EndUse();
1565 }
locke-lunargd556cc32019-09-17 01:21:23 -06001566 }
1567
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001568 auto fence_state = GetFenceState(submission.fence);
1569 if (fence_state && fence_state->scope == kSyncScopeInternal) {
1570 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06001571 }
1572
1573 pQueue->submissions.pop_front();
1574 pQueue->seq++;
1575 }
1576
1577 // Roll other queues forward to the highest seq we saw a wait for
John Zulauf79f06582021-02-27 18:38:39 -07001578 for (const auto &qs : other_queue_seqs) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001579 RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001580 }
John Zulauf79f06582021-02-27 18:38:39 -07001581 for (const auto &sc : timeline_semaphore_counters) {
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001582 RetireTimelineSemaphore(sc.first, sc.second);
1583 }
locke-lunargd556cc32019-09-17 01:21:23 -06001584}
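// Worked example of the sequence bookkeeping above (illustrative numbers): if a queue has retired
// up to seq == 5 and holds three pending submissions (seqs 6..8), the next recorded submission is
// assigned seq 5 + 3 + 1 == 9. A later RetireWorkOnQueue(queue, 9) then pops and retires
// submissions 6 through 9 in order, calling EndUse() on their semaphores and primary command
// buffers and marking an internally-scoped fence as retired.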
1585
1586// Submit a fence to a queue, delimiting previous fences and previous untracked
1587// work by it.
1588static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1589 pFence->state = FENCE_INFLIGHT;
1590 pFence->signaler.first = pQueue->queue;
1591 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1592}
1593
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001594uint64_t ValidationStateTracker::RecordSubmitFence(QUEUE_STATE *queue_state, VkFence fence, uint32_t submit_count) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001595 auto fence_state = GetFenceState(fence);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001596 uint64_t early_retire_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001597 if (fence_state) {
1598 if (fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06001599 // Mark fence in use
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001600 SubmitFence(queue_state, fence_state, std::max(1u, submit_count));
1601 if (!submit_count) {
locke-lunargd556cc32019-09-17 01:21:23 -06001602 // If no submissions, but just dropping a fence on the end of the queue,
1603 // record an empty submission with just the fence, so we can determine
1604 // its completion.
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001605 CB_SUBMISSION submission;
1606 submission.fence = fence;
1607 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001608 }
1609 } else {
1610             // Retire work up until this fence early, since we will not see the wait that corresponds to this signal
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001611 early_retire_seq = queue_state->seq + queue_state->submissions.size();
locke-lunargd556cc32019-09-17 01:21:23 -06001612 }
1613 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001614 return early_retire_seq;
1615}
1616
1617void ValidationStateTracker::RecordSubmitCommandBuffer(CB_SUBMISSION &submission, VkCommandBuffer command_buffer) {
1618 auto cb_node = GetCBState(command_buffer);
1619 if (cb_node) {
1620 submission.cbs.push_back(command_buffer);
John Zulauf79f06582021-02-27 18:38:39 -07001621 for (auto *secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06001622 submission.cbs.push_back(secondary_cmd_buffer->commandBuffer());
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001623 IncrementResources(secondary_cmd_buffer);
1624 }
1625 IncrementResources(cb_node);
Jeremy Gebben5570abe2021-05-16 18:35:13 -06001626 // increment use count for all bound objects including secondary cbs
1627 cb_node->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001628
1629 VkQueryPool first_pool = VK_NULL_HANDLE;
1630 EventToStageMap local_event_to_stage_map;
1631 QueryMap local_query_to_state_map;
1632 for (auto &function : cb_node->queryUpdates) {
1633 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
1634 }
1635
John Zulauf79f06582021-02-27 18:38:39 -07001636 for (const auto &query_state_pair : local_query_to_state_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001637 queryToStateMap[query_state_pair.first] = query_state_pair.second;
1638 }
1639
John Zulauf79f06582021-02-27 18:38:39 -07001640 for (const auto &function : cb_node->eventUpdates) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001641 function(nullptr, /*do_validate*/ false, &local_event_to_stage_map);
1642 }
1643
John Zulauf79f06582021-02-27 18:38:39 -07001644 for (const auto &eventStagePair : local_event_to_stage_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001645 eventMap[eventStagePair.first]->stageMask = eventStagePair.second;
1646 }
1647 }
1648}
1649
1650void ValidationStateTracker::RecordSubmitWaitSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
1651 uint64_t value, uint64_t next_seq) {
1652 auto semaphore_state = GetSemaphoreState(semaphore);
1653 if (semaphore_state) {
1654 if (semaphore_state->scope == kSyncScopeInternal) {
1655 SEMAPHORE_WAIT wait;
1656 wait.semaphore = semaphore;
1657 wait.type = semaphore_state->type;
1658 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
1659 if (semaphore_state->signaler.first != VK_NULL_HANDLE) {
1660 wait.queue = semaphore_state->signaler.first;
1661 wait.seq = semaphore_state->signaler.second;
1662 submission.waitSemaphores.emplace_back(std::move(wait));
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001663 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001664 }
1665 semaphore_state->signaler.first = VK_NULL_HANDLE;
1666 semaphore_state->signaled = false;
1667 } else if (semaphore_state->payload < value) {
1668 wait.queue = queue;
1669 wait.seq = next_seq;
1670 wait.payload = value;
1671 submission.waitSemaphores.emplace_back(std::move(wait));
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001672 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001673 }
1674 } else {
1675 submission.externalSemaphores.push_back(semaphore);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001676 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001677 if (semaphore_state->scope == kSyncScopeExternalTemporary) {
1678 semaphore_state->scope = kSyncScopeInternal;
1679 }
1680 }
1681 }
1682}
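// Worked example of the wait bookkeeping above (illustrative values): waiting on a binary
// semaphore that queue Q signaled at seq 7 records a SEMAPHORE_WAIT with queue = Q and seq = 7 and
// clears the signaler; waiting on a timeline semaphore for value 42 (while its current payload is
// lower) records a SEMAPHORE_WAIT with queue = this queue, seq = next_seq, and payload = 42.
// External-scope semaphores are instead tracked in submission.externalSemaphores.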
1683
1684bool ValidationStateTracker::RecordSubmitSignalSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
1685 uint64_t value, uint64_t next_seq) {
1686 bool retire_early = false;
1687 auto semaphore_state = GetSemaphoreState(semaphore);
1688 if (semaphore_state) {
1689 if (semaphore_state->scope == kSyncScopeInternal) {
1690 SEMAPHORE_SIGNAL signal;
1691 signal.semaphore = semaphore;
1692 signal.seq = next_seq;
1693 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
1694 semaphore_state->signaler.first = queue;
1695 semaphore_state->signaler.second = next_seq;
1696 semaphore_state->signaled = true;
1697 } else {
1698 signal.payload = value;
1699 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001700 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001701 submission.signalSemaphores.emplace_back(std::move(signal));
1702 } else {
1703             // Retire work up until this submit early, since we will not see the wait that corresponds to this signal
1704 retire_early = true;
1705 }
1706 }
1707 return retire_early;
1708}
1709
1710void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
1711 VkFence fence, VkResult result) {
1712 if (result != VK_SUCCESS) return;
1713 auto queue_state = GetQueueState(queue);
1714
1715 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);
locke-lunargd556cc32019-09-17 01:21:23 -06001716
1717 // Now process each individual submit
1718 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001719 CB_SUBMISSION submission;
locke-lunargd556cc32019-09-17 01:21:23 -06001720 const VkSubmitInfo *submit = &pSubmits[submit_idx];
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001721 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001722 auto *timeline_semaphore_submit = LvlFindInChain<VkTimelineSemaphoreSubmitInfo>(submit->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001723 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
Jeremy Gebben4ae5cc72021-03-10 15:34:44 -07001724 uint64_t value = 0;
1725 if (timeline_semaphore_submit && timeline_semaphore_submit->pWaitSemaphoreValues != nullptr &&
1726 (i < timeline_semaphore_submit->waitSemaphoreValueCount)) {
1727 value = timeline_semaphore_submit->pWaitSemaphoreValues[i];
1728 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001729 RecordSubmitWaitSemaphore(submission, queue, submit->pWaitSemaphores[i], value, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001730 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001731
1732 bool retire_early = false;
locke-lunargd556cc32019-09-17 01:21:23 -06001733 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
Jeremy Gebben4ae5cc72021-03-10 15:34:44 -07001734 uint64_t value = 0;
1735 if (timeline_semaphore_submit && timeline_semaphore_submit->pSignalSemaphoreValues != nullptr &&
1736 (i < timeline_semaphore_submit->signalSemaphoreValueCount)) {
1737 value = timeline_semaphore_submit->pSignalSemaphoreValues[i];
1738 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001739 retire_early |= RecordSubmitSignalSemaphore(submission, queue, submit->pSignalSemaphores[i], value, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001740 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001741 if (retire_early) {
1742 early_retire_seq = std::max(early_retire_seq, next_seq);
1743 }
1744
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001745 const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001746 submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001747
locke-lunargd556cc32019-09-17 01:21:23 -06001748 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001749 RecordSubmitCommandBuffer(submission, submit->pCommandBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06001750 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001751 submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
1752 queue_state->submissions.emplace_back(std::move(submission));
1753 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001754
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001755 if (early_retire_seq) {
1756 RetireWorkOnQueue(queue_state, early_retire_seq);
1757 }
1758}
1759
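// VK_KHR_synchronization2 variant: same bookkeeping as PostCallRecordQueueSubmit, but the wait/signal payload
// values come directly from the VkSemaphoreSubmitInfoKHR entries instead of a chained VkTimelineSemaphoreSubmitInfo.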
1760void ValidationStateTracker::PostCallRecordQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
1761 VkFence fence, VkResult result) {
1762 if (result != VK_SUCCESS) return;
1763 auto queue_state = GetQueueState(queue);
1764
1765 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);
1766
1767 // Now process each individual submit
1768 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1769 CB_SUBMISSION submission;
1770 const VkSubmitInfo2KHR *submit = &pSubmits[submit_idx];
1771 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
1772 for (uint32_t i = 0; i < submit->waitSemaphoreInfoCount; ++i) {
1773 const auto &sem_info = submit->pWaitSemaphoreInfos[i];
1774 RecordSubmitWaitSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
1775 }
1776 bool retire_early = false;
1777 for (uint32_t i = 0; i < submit->signalSemaphoreInfoCount; ++i) {
1778 const auto &sem_info = submit->pSignalSemaphoreInfos[i];
1779 retire_early |= RecordSubmitSignalSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
1780 }
1781 if (retire_early) {
1782 early_retire_seq = std::max(early_retire_seq, next_seq);
1783 }
1784        const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
1785 submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;
1786
1787 for (uint32_t i = 0; i < submit->commandBufferInfoCount; i++) {
1788 RecordSubmitCommandBuffer(submission, submit->pCommandBufferInfos[i].commandBuffer);
1789 }
1790 submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
1791 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001792 }
1793
1794 if (early_retire_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001795 RetireWorkOnQueue(queue_state, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001796 }
1797}
1798
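// On success, record a DEVICE_MEMORY_STATE entry holding the selected memory type/heap, a fake base address from
// the layer's internal address space, and (when a VkMemoryDedicatedAllocateInfo is chained) the create info of the
// dedicated buffer or image the allocation is tied to.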
1799void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
1800 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
1801 VkResult result) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06001802 if (VK_SUCCESS != result) {
1803 return;
locke-lunargd556cc32019-09-17 01:21:23 -06001804 }
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06001805 const auto &memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
1806 const auto &memory_heap = phys_dev_mem_props.memoryHeaps[memory_type.heapIndex];
1807 auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
1808
1809 layer_data::optional<DedicatedBinding> dedicated_binding;
1810
1811 auto dedicated = LvlFindInChain<VkMemoryDedicatedAllocateInfo>(pAllocateInfo->pNext);
1812 if (dedicated) {
1813 if (dedicated->buffer) {
1814 const auto *buffer_state = GetBufferState(dedicated->buffer);
1815 assert(buffer_state);
1816 if (!buffer_state) {
1817 return;
1818 }
1819 dedicated_binding.emplace(dedicated->buffer, buffer_state->createInfo);
1820 } else if (dedicated->image) {
1821 const auto *image_state = GetImageState(dedicated->image);
1822 assert(image_state);
1823 if (!image_state) {
1824 return;
1825 }
1826 dedicated_binding.emplace(dedicated->image, image_state->createInfo);
1827 }
1828 }
1829 memObjMap[*pMemory] = std::make_shared<DEVICE_MEMORY_STATE>(*pMemory, pAllocateInfo, fake_address, memory_type, memory_heap,
1830 std::move(dedicated_binding));
locke-lunargd556cc32019-09-17 01:21:23 -06001831 return;
1832}
1833
1834void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
1835 if (!mem) return;
1836 DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06001837 if (!mem_info) return;
locke-lunargd556cc32019-09-17 01:21:23 -06001838 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001839 mem_info->Destroy();
John Zulauf79952712020-04-07 11:25:54 -06001840 fake_memory.Free(mem_info->fake_base_address);
locke-lunargd556cc32019-09-17 01:21:23 -06001841 memObjMap.erase(mem);
1842}
1843
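// Sparse binds are tracked like queue submissions: each VkBindSparseInfo updates the sparse memory bindings of the
// affected buffers/images, then a CB_SUBMISSION carrying its wait/signal semaphores (and the fence, on the last
// bind) is queued.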
1844void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
1845 VkFence fence, VkResult result) {
1846 if (result != VK_SUCCESS) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001847 auto queue_state = GetQueueState(queue);
locke-lunargd556cc32019-09-17 01:21:23 -06001848
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001849 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, bindInfoCount);
locke-lunargd556cc32019-09-17 01:21:23 -06001850
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001851 for (uint32_t bind_idx = 0; bind_idx < bindInfoCount; ++bind_idx) {
1852 const VkBindSparseInfo &bind_info = pBindInfo[bind_idx];
locke-lunargd556cc32019-09-17 01:21:23 -06001853 // Track objects tied to memory
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001854 for (uint32_t j = 0; j < bind_info.bufferBindCount; j++) {
1855 for (uint32_t k = 0; k < bind_info.pBufferBinds[j].bindCount; k++) {
1856 auto sparse_binding = bind_info.pBufferBinds[j].pBinds[k];
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001857 auto buffer_state = GetBufferState(bind_info.pBufferBinds[j].buffer);
1858 auto mem_state = GetDevMemShared(sparse_binding.memory);
1859 if (buffer_state && mem_state) {
1860 buffer_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
1861 }
locke-lunargd556cc32019-09-17 01:21:23 -06001862 }
1863 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001864 for (uint32_t j = 0; j < bind_info.imageOpaqueBindCount; j++) {
1865 for (uint32_t k = 0; k < bind_info.pImageOpaqueBinds[j].bindCount; k++) {
1866 auto sparse_binding = bind_info.pImageOpaqueBinds[j].pBinds[k];
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001867 auto image_state = GetImageState(bind_info.pImageOpaqueBinds[j].image);
1868 auto mem_state = GetDevMemShared(sparse_binding.memory);
1869 if (image_state && mem_state) {
1870 image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
1871 }
locke-lunargd556cc32019-09-17 01:21:23 -06001872 }
1873 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001874 for (uint32_t j = 0; j < bind_info.imageBindCount; j++) {
1875 for (uint32_t k = 0; k < bind_info.pImageBinds[j].bindCount; k++) {
1876 auto sparse_binding = bind_info.pImageBinds[j].pBinds[k];
locke-lunargd556cc32019-09-17 01:21:23 -06001877 // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
1878 VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001879 auto image_state = GetImageState(bind_info.pImageBinds[j].image);
1880 auto mem_state = GetDevMemShared(sparse_binding.memory);
1881 if (image_state && mem_state) {
1882 image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, size);
1883 }
locke-lunargd556cc32019-09-17 01:21:23 -06001884 }
1885 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001886 CB_SUBMISSION submission;
1887 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001888 for (uint32_t i = 0; i < bind_info.waitSemaphoreCount; ++i) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001889 RecordSubmitWaitSemaphore(submission, queue, bind_info.pWaitSemaphores[i], 0, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001890 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001891 bool retire_early = false;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001892 for (uint32_t i = 0; i < bind_info.signalSemaphoreCount; ++i) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001893 retire_early |= RecordSubmitSignalSemaphore(submission, queue, bind_info.pSignalSemaphores[i], 0, next_seq);
1894 }
1895        // Retire work up to this submit early; we will not see the wait that corresponds to this signal
1896        if (retire_early) {
1897            early_retire_seq = std::max(early_retire_seq, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001898 }
1899
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001900 submission.fence = bind_idx == (bindInfoCount - 1) ? fence : VK_NULL_HANDLE;
1901 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001902 }
1903
1904 if (early_retire_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001905 RetireWorkOnQueue(queue_state, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001906 }
1907}
1908
1909void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
1910 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
1911 VkResult result) {
1912 if (VK_SUCCESS != result) return;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001913    semaphoreMap[*pSemaphore] =
        std::make_shared<SEMAPHORE_STATE>(*pSemaphore, LvlFindInChain<VkSemaphoreTypeCreateInfo>(pCreateInfo->pNext));
locke-lunargd556cc32019-09-17 01:21:23 -06001914}
1915
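// Importing a payload moves a semaphore to external scope: sync-fd handles and VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
// imports are temporary (the semaphore reverts to internal scope once it is waited on), all other imports are
// permanent.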
Mike Schuchardt2df08912020-12-15 16:28:09 -08001916void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBits handle_type,
1917 VkSemaphoreImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06001918 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
1919 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001920 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06001921 sema_node->scope == kSyncScopeInternal) {
1922 sema_node->scope = kSyncScopeExternalTemporary;
1923 } else {
1924 sema_node->scope = kSyncScopeExternalPermanent;
1925 }
1926 }
1927}
1928
Mike Schuchardt2df08912020-12-15 16:28:09 -08001929void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo,
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001930 VkResult result) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001931    if (VK_SUCCESS != result) return;
1932    auto *semaphore_state = GetSemaphoreState(pSignalInfo->semaphore);
1933    if (semaphore_state) {
1934        semaphore_state->payload = pSignalInfo->value;
1935    }
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001933}
1934
locke-lunargd556cc32019-09-17 01:21:23 -06001935void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
1936 auto mem_info = GetDevMemState(mem);
1937 if (mem_info) {
1938 mem_info->mapped_range.offset = offset;
1939 mem_info->mapped_range.size = size;
1940 mem_info->p_driver_data = *ppData;
1941 }
1942}
1943
1944void ValidationStateTracker::RetireFence(VkFence fence) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001945 auto fence_state = GetFenceState(fence);
1946 if (fence_state && fence_state->scope == kSyncScopeInternal) {
1947 if (fence_state->signaler.first != VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06001948 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001949 RetireWorkOnQueue(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001950 } else {
1951 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
1952 // the fence as retired.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001953 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06001954 }
1955 }
1956}
1957
1958void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
1959 VkBool32 waitAll, uint64_t timeout, VkResult result) {
1960 if (VK_SUCCESS != result) return;
1961
1962 // When we know that all fences are complete we can clean/remove their CBs
1963 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
1964 for (uint32_t i = 0; i < fenceCount; i++) {
1965 RetireFence(pFences[i]);
1966 }
1967 }
1968    // NOTE: The alternate case, where only some of the fences have completed, is not handled here. In that case
1969    // the app must call vkGetFenceStatus() to determine which fences completed, at which point we'll clean/remove
1970    // their CBs if complete.
1971}
1972
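// Scan every queue's pending submissions for signals on this timeline semaphore with a payload at or below
// until_payload, and retire work on each queue up to the highest matching sequence number.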
Jakub Okoński04feb3b2020-02-01 18:31:01 +01001973void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001974 auto semaphore_state = GetSemaphoreState(semaphore);
1975 if (semaphore_state) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01001976 for (auto &pair : queueMap) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001977 QUEUE_STATE &queue_state = pair.second;
Tony-LunarG47d5e272020-04-07 15:35:55 -06001978 uint64_t max_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001979 for (const auto &submission : queue_state.submissions) {
1980 for (const auto &signal_semaphore : submission.signalSemaphores) {
1981 if (signal_semaphore.semaphore == semaphore && signal_semaphore.payload <= until_payload) {
1982 if (signal_semaphore.seq > max_seq) {
1983 max_seq = signal_semaphore.seq;
Tony-LunarG47d5e272020-04-07 15:35:55 -06001984 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01001985 }
1986 }
1987 }
Tony-LunarG47d5e272020-04-07 15:35:55 -06001988 if (max_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001989 RetireWorkOnQueue(&queue_state, max_seq);
Tony-LunarG47d5e272020-04-07 15:35:55 -06001990 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01001991 }
1992 }
1993}
1994
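// Host-side timeline waits (vkWaitSemaphores / vkWaitSemaphoresKHR) land here; a successful wait is treated as
// proof that the waited payloads were reached, and the corresponding queue work is retired. Illustrative app-side
// call that reaches this hook (sketch only, not part of the layer):
//     VkSemaphoreWaitInfo wait_info = {VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO};
//     wait_info.semaphoreCount = 1;
//     wait_info.pSemaphores = &timeline_semaphore;
//     wait_info.pValues = &wait_value;
//     vkWaitSemaphores(device, &wait_info, UINT64_MAX);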
John Zulauff89de662020-04-13 18:57:34 -06001995void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
1996 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01001997 if (VK_SUCCESS != result) return;
1998
1999 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2000 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2001 }
2002}
2003
John Zulauff89de662020-04-13 18:57:34 -06002004void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2005 VkResult result) {
2006 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2007}
2008
2009void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
2010 uint64_t timeout, VkResult result) {
2011 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2012}
2013
Adrian Coca Lorentec7d76102020-09-28 13:58:16 +02002014void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2015 VkResult result) {
2016 if (VK_SUCCESS != result) return;
2017
2018 RetireTimelineSemaphore(semaphore, *pValue);
2019}
2020
2021void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2022 VkResult result) {
2023 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2024}

2025void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2026 VkResult result) {
2027 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2028}
2029
locke-lunargd556cc32019-09-17 01:21:23 -06002030void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2031 if (VK_SUCCESS != result) return;
2032 RetireFence(fence);
2033}
2034
2035void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
Jeremy Gebben5573a8c2021-06-04 08:55:10 -06002036 queueMap.emplace(queue, QUEUE_STATE(queue, queue_family_index));
locke-lunargd556cc32019-09-17 01:21:23 -06002037}
2038
2039void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
2040 VkQueue *pQueue) {
2041 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
2042}
2043
2044void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
2045 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
2046}
2047
2048void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2049 if (VK_SUCCESS != result) return;
2050 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002051 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002052}
2053
2054void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2055 if (VK_SUCCESS != result) return;
2056 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002057 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002058 }
2059}
2060
2061void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2062 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002063 auto fence_state = GetFenceState(fence);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002064 fence_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002065 fenceMap.erase(fence);
2066}
2067
2068void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2069 const VkAllocationCallbacks *pAllocator) {
2070 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002071 auto semaphore_state = GetSemaphoreState(semaphore);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002072 semaphore_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002073 semaphoreMap.erase(semaphore);
2074}
2075
2076void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2077 if (!event) return;
John Zulauf48057322020-12-02 11:59:31 -07002078 EVENT_STATE *event_state = Get<EVENT_STATE>(event);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002079 event_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002080 eventMap.erase(event);
2081}
2082
2083void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2084 const VkAllocationCallbacks *pAllocator) {
2085 if (!queryPool) return;
2086 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002087 qp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002088 queryPoolMap.erase(queryPool);
2089}
2090
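// Binding a buffer stores a shared reference to the DEVICE_MEMORY_STATE at the given offset, keeping the memory
// object's state reachable from the buffer.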
locke-lunargd556cc32019-09-17 01:21:23 -06002091void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2092 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2093 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002094 // Track objects tied to memory
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002095 auto mem_state = GetDevMemShared(mem);
2096 if (mem_state) {
2097 buffer_state->SetMemBinding(mem_state, memoryOffset);
2098 }
locke-lunargd556cc32019-09-17 01:21:23 -06002099 }
2100}
2101
2102void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
2103 VkDeviceSize memoryOffset, VkResult result) {
2104 if (VK_SUCCESS != result) return;
2105 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
2106}
2107
2108void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002109 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002110 for (uint32_t i = 0; i < bindInfoCount; i++) {
2111 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2112 }
2113}
2114
2115void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002116 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002117 for (uint32_t i = 0; i < bindInfoCount; i++) {
2118 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2119 }
2120}
2121
Spencer Fricke6c127102020-04-16 06:25:20 -07002122void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002123 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2124 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002125 buffer_state->memory_requirements_checked = true;
2126 }
2127}
2128
2129void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
2130 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002131 RecordGetBufferMemoryRequirementsState(buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002132}
2133
2134void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002135 const VkBufferMemoryRequirementsInfo2 *pInfo,
2136 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002137 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002138}
2139
2140void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002141 const VkBufferMemoryRequirementsInfo2 *pInfo,
2142 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002143 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002144}
2145
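// Track which memory-requirement queries the app has made. For disjoint multi-planar images, the chained
// VkImagePlaneMemoryRequirementsInfo selects the plane and each plane is tracked in its own slot; a single-plane
// (non-disjoint) image only uses slot 0.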
Spencer Fricke6c127102020-04-16 06:25:20 -07002146void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002147 const VkImagePlaneMemoryRequirementsInfo *plane_info =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002148 (pInfo == nullptr) ? nullptr : LvlFindInChain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002149 IMAGE_STATE *image_state = GetImageState(image);
2150 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002151 if (plane_info != nullptr) {
2152 // Multi-plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002153 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06002154 image_state->memory_requirements_checked[0] = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002155 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06002156 image_state->memory_requirements_checked[1] = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002157 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06002158 image_state->memory_requirements_checked[2] = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002159 }
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06002160 } else if (!image_state->disjoint) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002161 // Single Plane image
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06002162 image_state->memory_requirements_checked[0] = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002163 }
locke-lunargd556cc32019-09-17 01:21:23 -06002164 }
2165}
2166
2167void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
2168 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002169 RecordGetImageMemoryRequirementsState(image, nullptr);
locke-lunargd556cc32019-09-17 01:21:23 -06002170}
2171
2172void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2173 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002174 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002175}
2176
2177void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
2178 const VkImageMemoryRequirementsInfo2 *pInfo,
2179 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002180 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002181}
2182
2183static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2184 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2185 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2186 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2187 image_state->sparse_metadata_required = true;
2188 }
2189}
2190
2191void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2192 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2193 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2194 auto image_state = GetImageState(image);
2195 image_state->get_sparse_reqs_called = true;
2196 if (!pSparseMemoryRequirements) return;
2197 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2198 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2199 }
2200}
2201
2202void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002203 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2204 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002205 auto image_state = GetImageState(pInfo->image);
2206 image_state->get_sparse_reqs_called = true;
2207 if (!pSparseMemoryRequirements) return;
2208 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2209 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2210 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2211 }
2212}
2213
2214void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002215 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2216 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002217 auto image_state = GetImageState(pInfo->image);
2218 image_state->get_sparse_reqs_called = true;
2219 if (!pSparseMemoryRequirements) return;
2220 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2221 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2222 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2223 }
2224}
2225
2226void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2227 const VkAllocationCallbacks *pAllocator) {
2228 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002229 auto shader_module_state = GetShaderModuleState(shaderModule);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002230 shader_module_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002231 shaderModuleMap.erase(shaderModule);
2232}
2233
2234void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2235 const VkAllocationCallbacks *pAllocator) {
2236 if (!pipeline) return;
2237 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
locke-lunargd556cc32019-09-17 01:21:23 -06002238 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002239 pipeline_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002240 pipelineMap.erase(pipeline);
2241}
2242
2243void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2244 const VkAllocationCallbacks *pAllocator) {
2245 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002246 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002247 pipeline_layout_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002248 pipelineLayoutMap.erase(pipelineLayout);
2249}
2250
2251void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2252 const VkAllocationCallbacks *pAllocator) {
2253 if (!sampler) return;
2254 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
locke-lunargd556cc32019-09-17 01:21:23 -06002255 // Any bound cmd buffers are now invalid
2256 if (sampler_state) {
Yuly Novikov424cdd52020-05-26 16:45:12 -04002257 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2258 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
2259 custom_border_color_sampler_count--;
2260 }
2261
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002262 sampler_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002263 }
2264 samplerMap.erase(sampler);
2265}
2266
2267void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2268 const VkAllocationCallbacks *pAllocator) {
2269 if (!descriptorSetLayout) return;
2270 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2271 if (layout_it != descriptorSetLayoutMap.end()) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002272 layout_it->second.get()->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002273 descriptorSetLayoutMap.erase(layout_it);
2274 }
2275}
2276
2277void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2278 const VkAllocationCallbacks *pAllocator) {
2279 if (!descriptorPool) return;
2280 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002281 if (desc_pool_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002282 // Free sets that were in this pool
John Zulauf79f06582021-02-27 18:38:39 -07002283 for (auto *ds : desc_pool_state->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002284 FreeDescriptorSet(ds);
2285 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002286 desc_pool_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002287 descriptorPoolMap.erase(descriptorPool);
2288 }
2289}
2290
2291// Free all command buffers in the given list, removing all references/links to them and destroying their state
2292void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
2293 const VkCommandBuffer *command_buffers) {
2294 for (uint32_t i = 0; i < command_buffer_count; i++) {
John Zulaufd1f85d42020-04-15 12:23:15 -06002295 // Allow any derived class to clean up command buffer state
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002296 if (command_buffer_reset_callback) {
2297 (*command_buffer_reset_callback)(command_buffers[i]);
2298 }
John Zulaufd1f85d42020-04-15 12:23:15 -06002299 if (command_buffer_free_callback) {
2300 (*command_buffer_free_callback)(command_buffers[i]);
2301 }
2302
locke-lunargd556cc32019-09-17 01:21:23 -06002303 auto cb_state = GetCBState(command_buffers[i]);
2304 // Remove references to command buffer's state and delete
2305 if (cb_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002306 // Remove the cb_state's references from COMMAND_POOL_STATEs
2307 pool_state->commandBuffers.erase(command_buffers[i]);
2308 // Remove the cb debug labels
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06002309 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
locke-lunargd556cc32019-09-17 01:21:23 -06002310 // Remove CBState from CB map
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002311 cb_state->Destroy();
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06002312 commandBufferMap.erase(cb_state->commandBuffer());
locke-lunargd556cc32019-09-17 01:21:23 -06002313 }
2314 }
2315}
2316
2317void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2318 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002319 auto pool = GetCommandPoolState(commandPool);
2320 FreeCommandBufferStates(pool, commandBufferCount, pCommandBuffers);
locke-lunargd556cc32019-09-17 01:21:23 -06002321}
2322
2323void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2324 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2325 VkResult result) {
2326 if (VK_SUCCESS != result) return;
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06002327 auto queue_flags = GetPhysicalDeviceState()->queue_family_properties[pCreateInfo->queueFamilyIndex].queueFlags;
2328 commandPoolMap[*pCommandPool] = std::make_shared<COMMAND_POOL_STATE>(*pCommandPool, pCreateInfo, queue_flags);
locke-lunargd556cc32019-09-17 01:21:23 -06002329}
2330
2331void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2332 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2333 VkResult result) {
2334 if (VK_SUCCESS != result) return;
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002335
2336 uint32_t index_count = 0, n_perf_pass = 0;
2337 bool has_cb = false, has_rb = false;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002338 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002339 const auto *perf = LvlFindInChain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002340 index_count = perf->counterIndexCount;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002341
Mark Lobodzinski7e948e42020-09-09 10:23:36 -06002342 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002343 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2344 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2345 switch (counter.scope) {
2346 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002347 has_cb = true;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002348 break;
2349 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002350 has_rb = true;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002351 break;
2352 default:
2353 break;
2354 }
2355 }
2356
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002357 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf, &n_perf_pass);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002358 }
2359
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002360 queryPoolMap[*pQueryPool] =
2361 std::make_shared<QUERY_POOL_STATE>(*pQueryPool, pCreateInfo, index_count, n_perf_pass, has_cb, has_rb);
locke-lunargd556cc32019-09-17 01:21:23 -06002362
2363 QueryObject query_obj{*pQueryPool, 0u};
2364 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2365 query_obj.query = i;
2366 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2367 }
2368}
2369
2370void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2371 const VkAllocationCallbacks *pAllocator) {
2372 if (!commandPool) return;
2373 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2374 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2375 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2376 if (cp_state) {
2377 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2378 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2379 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002380 cp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002381 commandPoolMap.erase(commandPool);
2382 }
2383}
2384
2385void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2386 VkCommandPoolResetFlags flags, VkResult result) {
2387 if (VK_SUCCESS != result) return;
2388 // Reset all of the CBs allocated from this pool
2389 auto command_pool_state = GetCommandPoolState(commandPool);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002390 for (auto cmd_buffer : command_pool_state->commandBuffers) {
2391 ResetCommandBufferState(cmd_buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002392 }
2393}
2394
2395void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2396 VkResult result) {
2397 for (uint32_t i = 0; i < fenceCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002398 auto fence_state = GetFenceState(pFences[i]);
2399 if (fence_state) {
2400 if (fence_state->scope == kSyncScopeInternal) {
2401 fence_state->state = FENCE_UNSIGNALED;
2402 } else if (fence_state->scope == kSyncScopeExternalTemporary) {
2403 fence_state->scope = kSyncScopeInternal;
locke-lunargd556cc32019-09-17 01:21:23 -06002404 }
2405 }
2406 }
2407}
2408
locke-lunargd556cc32019-09-17 01:21:23 -06002409void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2410 const VkAllocationCallbacks *pAllocator) {
2411 if (!framebuffer) return;
2412 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002413 framebuffer_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002414 frameBufferMap.erase(framebuffer);
2415}
2416
2417void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2418 const VkAllocationCallbacks *pAllocator) {
2419 if (!renderPass) return;
2420 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002421 rp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002422 renderPassMap.erase(renderPass);
2423}
2424
2425void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2426 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2427 if (VK_SUCCESS != result) return;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002428 fenceMap[*pFence] = std::make_shared<FENCE_STATE>(*pFence, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002429}
2430
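// Pipeline creation goes through chassis-provided scratch state (the *_api_state structs): PreCallValidate builds
// the PIPELINE_STATE objects and captures the create infos (which GPU validation may substitute), and
// PostCallRecord assigns the returned handles and moves the state into pipelineMap for every non-null pipeline.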
2431bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2432 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2433 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002434 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002435 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2436 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2437 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2438 cgpl_state->pipe_state.reserve(count);
2439 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002440 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002441 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002442 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002443 }
2444 return false;
2445}
2446
2447void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2448 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2449 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2450 VkResult result, void *cgpl_state_data) {
2451 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2452 // This API may create pipelines regardless of the return value
2453 for (uint32_t i = 0; i < count; i++) {
2454 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002455 (cgpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002456 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2457 }
2458 }
2459 cgpl_state->pipe_state.clear();
2460}
2461
2462bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2463 const VkComputePipelineCreateInfo *pCreateInfos,
2464 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002465 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002466 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2467 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2468 ccpl_state->pipe_state.reserve(count);
2469 for (uint32_t i = 0; i < count; i++) {
2470 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002471 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002472 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002473 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002474 }
2475 return false;
2476}
2477
2478void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2479 const VkComputePipelineCreateInfo *pCreateInfos,
2480 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2481 VkResult result, void *ccpl_state_data) {
2482 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2483
2484 // This API may create pipelines regardless of the return value
2485 for (uint32_t i = 0; i < count; i++) {
2486 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002487 (ccpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002488 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2489 }
2490 }
2491 ccpl_state->pipe_state.clear();
2492}
2493
2494bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2495 uint32_t count,
2496 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2497 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002498 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002499 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2500 crtpl_state->pipe_state.reserve(count);
2501 for (uint32_t i = 0; i < count; i++) {
2502 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002503 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002504 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002505 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002506 }
2507 return false;
2508}
2509
2510void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2511 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2512 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2513 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2514 // This API may create pipelines regardless of the return value
2515 for (uint32_t i = 0; i < count; i++) {
2516 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002517 (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002518 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2519 }
2520 }
2521 crtpl_state->pipe_state.clear();
2522}
2523
sourav parmarcd5fb182020-07-17 12:58:44 -07002524bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
2525 VkPipelineCache pipelineCache, uint32_t count,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002526 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2527 const VkAllocationCallbacks *pAllocator,
2528 VkPipeline *pPipelines, void *crtpl_state_data) const {
2529 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2530 crtpl_state->pipe_state.reserve(count);
2531 for (uint32_t i = 0; i < count; i++) {
2532 // Create and initialize internal tracking data structure
2533 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
2534 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
2535 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
2536 }
2537 return false;
2538}
2539
sourav parmarcd5fb182020-07-17 12:58:44 -07002540void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
2541 VkPipelineCache pipelineCache, uint32_t count,
2542 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2543 const VkAllocationCallbacks *pAllocator,
2544 VkPipeline *pPipelines, VkResult result,
2545 void *crtpl_state_data) {
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002546 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2547 // This API may create pipelines regardless of the return value
2548 for (uint32_t i = 0; i < count; i++) {
2549 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002550 (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002551 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2552 }
2553 }
2554 crtpl_state->pipe_state.clear();
2555}
2556
locke-lunargd556cc32019-09-17 01:21:23 -06002557void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2558 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2559 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002560    if (VK_SUCCESS != result) return;
2561    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002561 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2562 pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
Tony-LunarG7337b312020-04-15 16:40:25 -06002563 custom_border_color_sampler_count++;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002564 }
locke-lunargd556cc32019-09-17 01:21:23 -06002565}
2566
2567void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2568 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2569 const VkAllocationCallbacks *pAllocator,
2570 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2571 if (VK_SUCCESS != result) return;
2572 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2573}
2574
2575// For repeatable sorting, not very useful for "memory in range" search
2576struct PushConstantRangeCompare {
2577 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2578 if (lhs->offset == rhs->offset) {
2579 if (lhs->size == rhs->size) {
2580 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2581 return lhs->stageFlags < rhs->stageFlags;
2582 }
2583 // If the offsets are the same then sorting by the end of range is useful for validation
2584 return lhs->size < rhs->size;
2585 }
2586 return lhs->offset < rhs->offset;
2587 }
2588};
2589
2590static PushConstantRangesDict push_constant_ranges_dict;
2591
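// Push constant ranges are canonicalized: the ranges are sorted and looked up in a shared dictionary so that
// layouts declaring the same ranges map to the same PushConstantRangesId, letting pipeline-layout compatibility
// checks compare ids instead of full range lists.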
2592PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2593 if (!info->pPushConstantRanges) {
2594 // Hand back the empty entry (creating as needed)...
2595 return push_constant_ranges_dict.look_up(PushConstantRanges());
2596 }
2597
2598 // Sort the input ranges to ensure equivalent ranges map to the same id
2599 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2600 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2601 sorted.insert(info->pPushConstantRanges + i);
2602 }
2603
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07002604 PushConstantRanges ranges;
2605 ranges.reserve(sorted.size());
John Zulauf79f06582021-02-27 18:38:39 -07002606 for (const auto *range : sorted) {
locke-lunargd556cc32019-09-17 01:21:23 -06002607 ranges.emplace_back(*range);
2608 }
2609 return push_constant_ranges_dict.look_up(std::move(ranges));
2610}
2611
2612// Dictionary of the canonical form of a pipeline layout's list of descriptor set layouts
2613static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2614
2615// Dictionary of canonical form of the "compatible for set" records
2616static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2617
2618static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2619 const PipelineLayoutSetLayoutsId set_layouts_id) {
2620 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2621}
2622
2623void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2624 const VkAllocationCallbacks *pAllocator,
2625 VkPipelineLayout *pPipelineLayout, VkResult result) {
2626 if (VK_SUCCESS != result) return;
2627
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002628 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>(*pPipelineLayout);
locke-lunargd556cc32019-09-17 01:21:23 -06002629 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2630 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2631 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002632 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002633 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2634 }
2635
2636 // Get canonical form IDs for the "compatible for set" contents
2637 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2638 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2639 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2640
2641    // Create table of "compatible for set N" canonical forms for trivial-accept validation
2642 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2643 pipeline_layout_state->compat_for_set.emplace_back(
2644 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2645 }
2646 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2647}
2648
2649void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2650 const VkAllocationCallbacks *pAllocator,
2651 VkDescriptorPool *pDescriptorPool, VkResult result) {
2652 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002653 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002654}
2655
2656void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2657 VkDescriptorPoolResetFlags flags, VkResult result) {
2658 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002659 DESCRIPTOR_POOL_STATE *pool = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002660 // TODO: validate flags
2661    // For every set allocated from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
John Zulauf79f06582021-02-27 18:38:39 -07002662 for (auto *ds : pool->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002663 FreeDescriptorSet(ds);
2664 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002665 pool->sets.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06002666 // Reset available count for each type and available sets for this pool
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002667 for (auto it = pool->availableDescriptorTypeCount.begin(); it != pool->availableDescriptorTypeCount.end(); ++it) {
2668 pool->availableDescriptorTypeCount[it->first] = pool->maxDescriptorTypeCount[it->first];
locke-lunargd556cc32019-09-17 01:21:23 -06002669 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002670 pool->availableSets = pool->maxSets;
locke-lunargd556cc32019-09-17 01:21:23 -06002671}
2672
2673bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2674 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002675 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002676 // Always update common data
2677 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2678 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2679 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2680
2681 return false;
2682}
2683
2684// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
2685void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2686 VkDescriptorSet *pDescriptorSets, VkResult result,
2687 void *ads_state_data) {
2688 if (VK_SUCCESS != result) return;
2689 // All the updates are contained in a single cvdescriptorset function
2690 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2691 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2692 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2693}
2694
2695void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2696 const VkDescriptorSet *pDescriptorSets) {
2697 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2698 // Update available descriptor sets in pool
2699 pool_state->availableSets += count;
2700
2701 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
2702 for (uint32_t i = 0; i < count; ++i) {
2703 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2704 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2705 uint32_t type_index = 0, descriptor_count = 0;
2706 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2707 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2708 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2709 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2710 }
2711 FreeDescriptorSet(descriptor_set);
2712 pool_state->sets.erase(descriptor_set);
2713 }
2714 }
2715}
2716
2717void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2718 const VkWriteDescriptorSet *pDescriptorWrites,
2719 uint32_t descriptorCopyCount,
2720 const VkCopyDescriptorSet *pDescriptorCopies) {
2721 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2722 pDescriptorCopies);
2723}
2724
2725void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2726 VkCommandBuffer *pCommandBuffer, VkResult result) {
2727 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002728 auto pool = GetCommandPoolShared(pCreateInfo->commandPool);
2729 if (pool) {
locke-lunargd556cc32019-09-17 01:21:23 -06002730 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2731 // Add command buffer to its commandPool map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002732 pool->commandBuffers.insert(pCommandBuffer[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002733 auto cb_state = std::make_shared<CMD_BUFFER_STATE>(pCommandBuffer[i], pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002734 cb_state->command_pool = pool;
2735 cb_state->unprotected = pool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06002736 // Add command buffer to map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002737 commandBufferMap[pCommandBuffer[i]] = std::move(cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002738 ResetCommandBufferState(pCommandBuffer[i]);
2739 }
2740 }
2741}
2742
locke-lunargfc78e932020-11-19 17:06:24 -07002743void UpdateSubpassAttachments(const safe_VkSubpassDescription2 &subpass, std::vector<SUBPASS_INFO> &subpasses) {
2744 for (uint32_t index = 0; index < subpass.inputAttachmentCount; ++index) {
2745 const uint32_t attachment_index = subpass.pInputAttachments[index].attachment;
2746 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2747 subpasses[attachment_index].used = true;
2748 subpasses[attachment_index].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
2749 subpasses[attachment_index].layout = subpass.pInputAttachments[index].layout;
2750 }
2751 }
2752
2753 for (uint32_t index = 0; index < subpass.colorAttachmentCount; ++index) {
2754 const uint32_t attachment_index = subpass.pColorAttachments[index].attachment;
2755 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2756 subpasses[attachment_index].used = true;
2757 subpasses[attachment_index].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2758 subpasses[attachment_index].layout = subpass.pColorAttachments[index].layout;
2759 }
2760 if (subpass.pResolveAttachments) {
2761 const uint32_t attachment_index2 = subpass.pResolveAttachments[index].attachment;
2762 if (attachment_index2 != VK_ATTACHMENT_UNUSED) {
2763 subpasses[attachment_index2].used = true;
2764 subpasses[attachment_index2].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2765 subpasses[attachment_index2].layout = subpass.pResolveAttachments[index].layout;
2766 }
2767 }
2768 }
2769
2770 if (subpass.pDepthStencilAttachment) {
2771 const uint32_t attachment_index = subpass.pDepthStencilAttachment->attachment;
2772 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2773 subpasses[attachment_index].used = true;
2774 subpasses[attachment_index].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
2775 subpasses[attachment_index].layout = subpass.pDepthStencilAttachment->layout;
2776 }
2777 }
2778}
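// UpdateSubpassAttachments is indexed by the render pass attachment index (which is also the framebuffer attachment
// index), not by the subpass-local slot. For example, a subpass whose pColorAttachments[0].attachment == 2 marks
// subpasses[2] as used with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT and records the layout that subpass expects for it.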
2779
2780void UpdateAttachmentsView(ValidationStateTracker &tracker, CMD_BUFFER_STATE &cb_state, const FRAMEBUFFER_STATE &framebuffer,
2781 const VkRenderPassBeginInfo *pRenderPassBegin) {
2782 auto &attachments = *(cb_state.active_attachments.get());
2783 const bool imageless = (framebuffer.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) ? true : false;
2784 const VkRenderPassAttachmentBeginInfo *attachment_info_struct = nullptr;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002785 if (pRenderPassBegin) attachment_info_struct = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
locke-lunargfc78e932020-11-19 17:06:24 -07002786
2787 for (uint32_t i = 0; i < attachments.size(); ++i) {
2788 if (imageless) {
2789 if (attachment_info_struct && i < attachment_info_struct->attachmentCount) {
2790 auto res = cb_state.attachments_view_states.insert(
2791 tracker.GetShared<IMAGE_VIEW_STATE>(attachment_info_struct->pAttachments[i]));
2792 attachments[i] = res.first->get();
2793 }
2794 } else {
2795 auto res = cb_state.attachments_view_states.insert(framebuffer.attachments_view_state[i]);
2796 attachments[i] = res.first->get();
2797 }
2798 }
2799}
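// For imageless framebuffers (VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) the image views are not part of the framebuffer;
// they arrive at render pass begin time through VkRenderPassAttachmentBeginInfo, which is why the view states are
// resolved from that pNext struct above. For regular framebuffers the views come straight from the framebuffer state.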
2800
locke-lunargd556cc32019-09-17 01:21:23 -06002801void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
2802 const VkCommandBufferBeginInfo *pBeginInfo) {
2803 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2804 if (!cb_state) return;
locke-lunargfc78e932020-11-19 17:06:24 -07002805
locke-lunargd556cc32019-09-17 01:21:23 -06002806 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
2807 ResetCommandBufferState(commandBuffer);
2808 }
2809 // Set updated state here in case implicit reset occurs above
2810 cb_state->state = CB_RECORDING;
2811 cb_state->beginInfo = *pBeginInfo;
Tony-LunarG3c287f62020-12-17 12:39:49 -07002812 if (cb_state->beginInfo.pInheritanceInfo && (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
locke-lunargd556cc32019-09-17 01:21:23 -06002813 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
2814 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
 2815 // If we are a secondary command buffer that inherits render pass state, update the items we should inherit.
2816 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
2817 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
locke-lunargaecf2152020-05-12 17:15:41 -06002818 cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
locke-lunargd556cc32019-09-17 01:21:23 -06002819 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
locke-lunargfc78e932020-11-19 17:06:24 -07002820
locke-lunargaecf2152020-05-12 17:15:41 -06002821 if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
2822 cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
locke-lunargfc78e932020-11-19 17:06:24 -07002823 cb_state->active_subpasses = nullptr;
2824 cb_state->active_attachments = nullptr;
2825
2826 if (cb_state->activeFramebuffer) {
2827 cb_state->framebuffers.insert(cb_state->activeFramebuffer);
2828
2829 // Set cb_state->active_subpasses
2830 cb_state->active_subpasses =
2831 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
2832 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
2833 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
2834
2835 // Set cb_state->active_attachments & cb_state->attachments_view_states
2836 cb_state->active_attachments =
2837 std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
2838 UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, nullptr);
2839
2840 // Connect this framebuffer and its children to this cmdBuffer
Jeremy Gebben5570abe2021-05-16 18:35:13 -06002841 if (!disabled[command_buffer_state]) {
2842 cb_state->AddChild(cb_state->activeFramebuffer.get());
2843 }
locke-lunargfc78e932020-11-19 17:06:24 -07002844 }
locke-lunargaecf2152020-05-12 17:15:41 -06002845 }
David Zhao Akeley44139b12021-04-26 16:16:13 -07002846
2847 // Check for VkCommandBufferInheritanceViewportScissorInfoNV (VK_NV_inherited_viewport_scissor)
2848 auto p_inherited_viewport_scissor_info =
2849 LvlFindInChain<VkCommandBufferInheritanceViewportScissorInfoNV>(cb_state->beginInfo.pInheritanceInfo->pNext);
2850 if (p_inherited_viewport_scissor_info != nullptr && p_inherited_viewport_scissor_info->viewportScissor2D) {
2851 auto pViewportDepths = p_inherited_viewport_scissor_info->pViewportDepths;
2852 cb_state->inheritedViewportDepths.assign(
2853 pViewportDepths, pViewportDepths + p_inherited_viewport_scissor_info->viewportDepthCount);
2854 }
locke-lunargd556cc32019-09-17 01:21:23 -06002855 }
2856 }
2857
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002858 auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002859 if (chained_device_group_struct) {
2860 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
2861 } else {
2862 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
2863 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002864
2865 cb_state->performance_lock_acquired = performance_lock_acquired;
locke-lunargd556cc32019-09-17 01:21:23 -06002866}
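// Application-side sketch of the inheritance path handled above (illustrative only, not tracker code):
//   VkCommandBufferInheritanceInfo inherit = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO};
//   inherit.renderPass = render_pass;
//   inherit.subpass = 0;
//   inherit.framebuffer = framebuffer;  // may be VK_NULL_HANDLE; then active_attachments stays unset here
//   VkCommandBufferBeginInfo begin = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};
//   begin.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
//   begin.pInheritanceInfo = &inherit;
//   vkBeginCommandBuffer(secondary_cb, &begin);
// Beginning a secondary command buffer like this makes the tracker adopt the inherited render pass, subpass and
// (optionally) framebuffer so later checks can see the active attachments.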
2867
2868void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2869 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2870 if (!cb_state) return;
 2871 // Cached validation is tied to one particular recording of one particular command buffer.
John Zulauf79f06582021-02-27 18:38:39 -07002872 for (auto *descriptor_set : cb_state->validated_descriptor_sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002873 descriptor_set->ClearCachedValidation(cb_state);
2874 }
2875 cb_state->validated_descriptor_sets.clear();
2876 if (VK_SUCCESS == result) {
2877 cb_state->state = CB_RECORDED;
2878 }
2879}
2880
2881void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
2882 VkResult result) {
2883 if (VK_SUCCESS == result) {
2884 ResetCommandBufferState(commandBuffer);
2885 }
2886}
2887
2888CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
2889 // initially assume everything is static state
2890 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
2891
2892 if (ds) {
2893 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
locke-lunarg4189aa22020-10-21 00:23:48 -06002894 flags &= ~ConvertToCBStatusFlagBits(ds->pDynamicStates[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002895 }
2896 }
locke-lunargd556cc32019-09-17 01:21:23 -06002897 return flags;
2898}
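// Example: a pipeline whose pDynamicStates lists VK_DYNAMIC_STATE_VIEWPORT and VK_DYNAMIC_STATE_SCISSOR gets
// CBSTATUS_VIEWPORT_SET and CBSTATUS_SCISSOR_SET cleared from the returned mask, so only the state actually baked
// into the pipeline is treated as "statically set" when the pipeline is bound.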
2899
2900// Validation cache:
2901// CV is the bottommost implementor of this extension. Don't pass calls down.
2902// utility function to set collective state for pipeline
2903void SetPipelineState(PIPELINE_STATE *pPipe) {
2904 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
2905 if (pPipe->graphicsPipelineCI.pColorBlendState) {
2906 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
2907 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
2908 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2909 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2910 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2911 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2912 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2913 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2914 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2915 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
2916 pPipe->blendConstantsEnabled = true;
2917 }
2918 }
2919 }
2920 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07002921 // Check if sample location is enabled
2922 if (pPipe->graphicsPipelineCI.pMultisampleState) {
2923 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002924 LvlFindInChain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
sfricke-samsung8f658d42020-05-03 20:12:24 -07002925 if (sample_location_state != nullptr) {
2926 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
2927 }
2928 }
locke-lunargd556cc32019-09-17 01:21:23 -06002929}
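// blendConstantsEnabled and sample_location_enabled computed here are cheap per-pipeline summaries intended for later
// checks: the former records that some attachment blends against the constant color/alpha (so blend constants matter,
// whether baked in or provided via vkCmdSetBlendConstants), the latter that the pipeline enables custom sample locations.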
2930
2931void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
2932 VkPipeline pipeline) {
2933 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2934 assert(cb_state);
2935
2936 auto pipe_state = GetPipelineState(pipeline);
2937 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002938 bool rasterization_enabled = VK_FALSE == pipe_state->graphicsPipelineCI.ptr()->pRasterizationState->rasterizerDiscardEnable;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002939 const auto* viewport_state = pipe_state->graphicsPipelineCI.ptr()->pViewportState;
2940 const auto* dynamic_state = pipe_state->graphicsPipelineCI.ptr()->pDynamicState;
locke-lunargd556cc32019-09-17 01:21:23 -06002941 cb_state->status &= ~cb_state->static_status;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002942 cb_state->static_status = MakeStaticStateMask(dynamic_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002943 cb_state->status |= cb_state->static_status;
locke-lunarg4189aa22020-10-21 00:23:48 -06002944 cb_state->dynamic_status = CBSTATUS_ALL_STATE_SET & (~cb_state->static_status);
David Zhao Akeley44139b12021-04-26 16:16:13 -07002945
 2946 // Used to calculate CMD_BUFFER_STATE::usedViewportScissorCount upon a draw command with this graphics pipeline.
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002947 // If rasterization is disabled (no viewports/scissors are used), or the actual number of viewports/scissors is dynamic
 2948 // (unknown at this time), then these are set to 0 to disable this checking.
David Zhao Akeley44139b12021-04-26 16:16:13 -07002949 auto has_dynamic_viewport_count = cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002950 auto has_dynamic_scissor_count = cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002951 cb_state->pipelineStaticViewportCount =
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002952 has_dynamic_viewport_count || !rasterization_enabled ? 0 : viewport_state->viewportCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002953 cb_state->pipelineStaticScissorCount =
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002954 has_dynamic_scissor_count || !rasterization_enabled ? 0 : viewport_state->scissorCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07002955
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002956 // Trash dynamic viewport/scissor state if the pipeline defines static state and rasterization is enabled.
David Zhao Akeley44139b12021-04-26 16:16:13 -07002957 // akeley98 NOTE: There's a bit of an ambiguity in the spec, whether binding such a pipeline overwrites
2958 // the entire viewport (scissor) array, or only the subsection defined by the viewport (scissor) count.
2959 // I am taking the latter interpretation based on the implementation details of NVIDIA's Vulkan driver.
David Zhao Akeley44139b12021-04-26 16:16:13 -07002960 if (!has_dynamic_viewport_count) {
2961 cb_state->trashedViewportCount = true;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002962 if (rasterization_enabled && (cb_state->static_status & CBSTATUS_VIEWPORT_SET)) {
David Zhao Akeley44139b12021-04-26 16:16:13 -07002963 cb_state->trashedViewportMask |= (uint32_t(1) << viewport_state->viewportCount) - 1u;
2964 // should become = ~uint32_t(0) if the other interpretation is correct.
2965 }
2966 }
2967 if (!has_dynamic_scissor_count) {
2968 cb_state->trashedScissorCount = true;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07002969 if (rasterization_enabled && (cb_state->static_status & CBSTATUS_SCISSOR_SET)) {
David Zhao Akeley44139b12021-04-26 16:16:13 -07002970 cb_state->trashedScissorMask |= (uint32_t(1) << viewport_state->scissorCount) - 1u;
2971 // should become = ~uint32_t(0) if the other interpretation is correct.
2972 }
2973 }
locke-lunargd556cc32019-09-17 01:21:23 -06002974 }
locke-lunargb8d7a7a2020-10-25 16:01:52 -06002975 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
2976 cb_state->lastBound[lv_bind_point].pipeline_state = pipe_state;
locke-lunargd556cc32019-09-17 01:21:23 -06002977 SetPipelineState(pipe_state);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002978 if (!disabled[command_buffer_state]) {
2979 cb_state->AddChild(pipe_state);
2980 }
locke-lunargb8be8222020-10-20 00:34:37 -06002981 for (auto &slot : pipe_state->active_slots) {
2982 for (auto &req : slot.second) {
2983 for (auto &sampler : req.second.samplers_used_by_image) {
2984 for (auto &des : sampler) {
2985 des.second = nullptr;
2986 }
2987 }
2988 }
2989 }
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06002990 cb_state->lastBound[lv_bind_point].UpdateSamplerDescriptorsUsedByImage();
locke-lunargd556cc32019-09-17 01:21:23 -06002991}
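// Worked example of the trashing logic above (a sketch under the "count-limited" interpretation noted in the code):
// binding a pipeline with static viewport state and viewportCount == 3 executes
//   trashedViewportMask |= (uint32_t(1) << 3) - 1u;   // == 0b111
// which invalidates any vkCmdSetViewport state previously recorded for slots 0..2, while slots 3 and up are left alone.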
2992
2993void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2994 uint32_t viewportCount, const VkViewport *pViewports) {
2995 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07002996 uint32_t bits = ((1u << viewportCount) - 1u) << firstViewport;
2997 cb_state->viewportMask |= bits;
2998 cb_state->trashedViewportMask &= ~bits;
locke-lunargd556cc32019-09-17 01:21:23 -06002999 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003000 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003001
3002 cb_state->dynamicViewports.resize(std::max(size_t(firstViewport + viewportCount), cb_state->dynamicViewports.size()));
3003 for (size_t i = 0; i < viewportCount; ++i) {
3004 cb_state->dynamicViewports[firstViewport + i] = pViewports[i];
3005 }
locke-lunargd556cc32019-09-17 01:21:23 -06003006}
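// Example of the mask bookkeeping above: vkCmdSetViewport(cb, /*firstViewport*/ 1, /*viewportCount*/ 2, ...) computes
// bits == ((1u << 2) - 1u) << 1 == 0b110, marks viewports 1 and 2 as set in viewportMask, and clears them from
// trashedViewportMask so a previously bound static-viewport pipeline no longer counts them as invalidated.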
3007
3008void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3009 uint32_t exclusiveScissorCount,
3010 const VkRect2D *pExclusiveScissors) {
3011 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3012 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3013 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3014 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003015 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003016}
3017
3018void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3019 VkImageLayout imageLayout) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003020 if (disabled[command_buffer_state]) return;
3021
locke-lunargd556cc32019-09-17 01:21:23 -06003022 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3023
3024 if (imageView != VK_NULL_HANDLE) {
3025 auto view_state = GetImageViewState(imageView);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003026 cb_state->AddChild(view_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003027 }
3028}
3029
3030void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3031 uint32_t viewportCount,
3032 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3033 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3034 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3035 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3036 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003037 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003038}
3039
3040void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3041 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3042 const VkAllocationCallbacks *pAllocator,
3043 VkAccelerationStructureNV *pAccelerationStructure,
3044 VkResult result) {
3045 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003046 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003047
3048 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003049 auto as_memory_requirements_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003050 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003051 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003052 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3053
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003054 auto scratch_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003055 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003056 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003057 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3058 &as_state->build_scratch_memory_requirements);
3059
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003060 auto update_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003061 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003062 update_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003063 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3064 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003065 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003066 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3067}
3068
Jeff Bolz95176d02020-04-01 00:36:16 -05003069void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3070 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3071 const VkAllocationCallbacks *pAllocator,
3072 VkAccelerationStructureKHR *pAccelerationStructure,
3073 VkResult result) {
3074 if (VK_SUCCESS != result) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003075 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE_KHR>(*pAccelerationStructure, pCreateInfo);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003076 as_state->allocator = pAllocator;
sourav parmarcd5fb182020-07-17 12:58:44 -07003077 accelerationStructureMap_khr[*pAccelerationStructure] = std::move(as_state);
Jeff Bolz95176d02020-04-01 00:36:16 -05003078}
3079
sourav parmarcd5fb182020-07-17 12:58:44 -07003080void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresKHR(
3081 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3082 const VkAccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos) {
3083 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3084 if (cb_state == nullptr) {
3085 return;
3086 }
3087 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003088 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003089 if (dst_as_state != nullptr) {
3090 dst_as_state->built = true;
3091 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003092 if (!disabled[command_buffer_state]) {
3093 cb_state->AddChild(dst_as_state);
3094 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003095 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003096 if (!disabled[command_buffer_state]) {
3097 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3098 if (src_as_state != nullptr) {
3099 cb_state->AddChild(src_as_state);
3100 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003101 }
3102 }
3103 cb_state->hasBuildAccelerationStructureCmd = true;
3104}
3105
3106void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresIndirectKHR(
3107 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3108 const VkDeviceAddress *pIndirectDeviceAddresses, const uint32_t *pIndirectStrides,
3109 const uint32_t *const *ppMaxPrimitiveCounts) {
3110 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3111 if (cb_state == nullptr) {
3112 return;
3113 }
3114 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003115 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003116 if (dst_as_state != nullptr) {
3117 dst_as_state->built = true;
3118 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003119 if (!disabled[command_buffer_state]) {
3120 cb_state->AddChild(dst_as_state);
3121 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003122 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003123 if (!disabled[command_buffer_state]) {
3124 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3125 if (src_as_state != nullptr) {
3126 cb_state->AddChild(src_as_state);
3127 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003128 }
3129 }
3130 cb_state->hasBuildAccelerationStructureCmd = true;
3131}
locke-lunargd556cc32019-09-17 01:21:23 -06003132void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
Mike Schuchardt2df08912020-12-15 16:28:09 -08003133 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2 *pMemoryRequirements) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003134 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(pInfo->accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003135 if (as_state != nullptr) {
3136 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3137 as_state->memory_requirements = *pMemoryRequirements;
3138 as_state->memory_requirements_checked = true;
3139 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3140 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3141 as_state->build_scratch_memory_requirements_checked = true;
3142 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3143 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3144 as_state->update_scratch_memory_requirements_checked = true;
3145 }
3146 }
3147}
3148
sourav parmarcd5fb182020-07-17 12:58:44 -07003149void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3150 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06003151 if (VK_SUCCESS != result) return;
3152 for (uint32_t i = 0; i < bindInfoCount; i++) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003153 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003154
sourav parmarcd5fb182020-07-17 12:58:44 -07003155 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(info.accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003156 if (as_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06003157 // Track objects tied to memory
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003158 auto mem_state = GetDevMemShared(info.memory);
3159 if (mem_state) {
3160 as_state->SetMemBinding(mem_state, info.memoryOffset);
3161 }
locke-lunargd556cc32019-09-17 01:21:23 -06003162
3163 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003164 // XXX TODO: Query device address for KHR extension
sourav parmarcd5fb182020-07-17 12:58:44 -07003165 if (enabled[gpu_validation]) {
locke-lunargd556cc32019-09-17 01:21:23 -06003166 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3167 }
3168 }
3169 }
3170}
3171
3172void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3173 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3174 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3175 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3176 if (cb_state == nullptr) {
3177 return;
3178 }
3179
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003180 auto *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003181 if (dst_as_state != nullptr) {
3182 dst_as_state->built = true;
3183 dst_as_state->build_info.initialize(pInfo);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003184 if (!disabled[command_buffer_state]) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003185 cb_state->AddChild(dst_as_state);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003186 }
locke-lunargd556cc32019-09-17 01:21:23 -06003187 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003188 if (!disabled[command_buffer_state]) {
3189 auto *src_as_state = GetAccelerationStructureStateNV(src);
3190 if (src_as_state != nullptr) {
3191 cb_state->AddChild(src_as_state);
3192 }
locke-lunargd556cc32019-09-17 01:21:23 -06003193 }
3194 cb_state->hasBuildAccelerationStructureCmd = true;
3195}
3196
3197void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3198 VkAccelerationStructureNV dst,
3199 VkAccelerationStructureNV src,
3200 VkCopyAccelerationStructureModeNV mode) {
3201 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3202 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003203 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
3204 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003205 if (dst_as_state != nullptr && src_as_state != nullptr) {
3206 dst_as_state->built = true;
3207 dst_as_state->build_info = src_as_state->build_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003208 if (!disabled[command_buffer_state]) {
3209 cb_state->AddChild(dst_as_state);
3210 cb_state->AddChild(src_as_state);
3211 }
locke-lunargd556cc32019-09-17 01:21:23 -06003212 }
3213 }
3214}
3215
Jeff Bolz95176d02020-04-01 00:36:16 -05003216void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3217 VkAccelerationStructureKHR accelerationStructure,
3218 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003219 if (!accelerationStructure) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003220 auto *as_state = GetAccelerationStructureStateKHR(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003221 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003222 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003223 accelerationStructureMap_khr.erase(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003224 }
3225}
3226
Jeff Bolz95176d02020-04-01 00:36:16 -05003227void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3228 VkAccelerationStructureNV accelerationStructure,
3229 const VkAllocationCallbacks *pAllocator) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003230 if (!accelerationStructure) return;
3231 auto *as_state = GetAccelerationStructureStateNV(accelerationStructure);
3232 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003233 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003234 accelerationStructureMap.erase(accelerationStructure);
3235 }
Jeff Bolz95176d02020-04-01 00:36:16 -05003236}
3237
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003238void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3239 uint32_t viewportCount,
3240 const VkViewportWScalingNV *pViewportWScalings) {
3241 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3242 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003243 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003244}
3245
locke-lunargd556cc32019-09-17 01:21:23 -06003246void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3247 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3248 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003249 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003250}
3251
3252void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3253 uint16_t lineStipplePattern) {
3254 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3255 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003256 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003257}
3258
3259void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3260 float depthBiasClamp, float depthBiasSlopeFactor) {
3261 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3262 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003263 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003264}
3265
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003266void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3267 const VkRect2D *pScissors) {
3268 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003269 uint32_t bits = ((1u << scissorCount) - 1u) << firstScissor;
3270 cb_state->scissorMask |= bits;
3271 cb_state->trashedScissorMask &= ~bits;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003272 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003273 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003274}
3275
locke-lunargd556cc32019-09-17 01:21:23 -06003276void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3277 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3278 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003279 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003280}
3281
3282void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3283 float maxDepthBounds) {
3284 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3285 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003286 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003287}
3288
3289void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3290 uint32_t compareMask) {
3291 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3292 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003293 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003294}
3295
3296void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3297 uint32_t writeMask) {
3298 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3299 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003300 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003301}
3302
3303void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3304 uint32_t reference) {
3305 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3306 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003307 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003308}
3309
3310// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
 3311// Exactly one of pDescriptorSets or push_descriptor_set must be nullptr, indicating whether this
3312// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
3313void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
3314 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
3315 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
3316 cvdescriptorset::DescriptorSet *push_descriptor_set,
3317 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
3318 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
3319 // Defensive
3320 assert(pipeline_layout);
3321 if (!pipeline_layout) return;
3322
3323 uint32_t required_size = first_set + set_count;
3324 const uint32_t last_binding_index = required_size - 1;
3325 assert(last_binding_index < pipeline_layout->compat_for_set.size());
3326
3327 // Some useful shorthand
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003328 const auto lv_bind_point = ConvertToLvlBindPoint(pipeline_bind_point);
3329 auto &last_bound = cb_state->lastBound[lv_bind_point];
locke-lunargd556cc32019-09-17 01:21:23 -06003330 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
3331 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
3332
3333 // We need this three times in this function, but nowhere else
3334 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
3335 if (ds && ds->IsPushDescriptor()) {
3336 assert(ds == last_bound.push_descriptor_set.get());
3337 last_bound.push_descriptor_set = nullptr;
3338 return true;
3339 }
3340 return false;
3341 };
3342
3343 // Clean up the "disturbed" before and after the range to be set
3344 if (required_size < current_size) {
3345 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
 3346 // We're disturbing the entries past the last one; we'll shrink below, but first check for and clean up the push descriptor
3347 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
3348 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
3349 }
3350 } else {
3351 // We're not disturbing past last, so leave the upper binding data alone.
3352 required_size = current_size;
3353 }
3354 }
3355
3356 // We resize if we need more set entries or if those past "last" are disturbed
3357 if (required_size != current_size) {
3358 last_bound.per_set.resize(required_size);
3359 }
3360
3361 // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
3362 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
3363 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
3364 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3365 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
3366 last_bound.per_set[set_idx].dynamicOffsets.clear();
3367 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
3368 }
3369 }
3370
3371 // Now update the bound sets with the input sets
3372 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
3373 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
3374 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
3375 cvdescriptorset::DescriptorSet *descriptor_set =
3376 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
3377
3378 // Record binding (or push)
3379 if (descriptor_set != last_bound.push_descriptor_set.get()) {
3380 // Only cleanup the push descriptors if they aren't the currently used set.
3381 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3382 }
3383 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
3384 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
3385
3386 if (descriptor_set) {
3387 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
3388 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
3389 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
3390 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
3391 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
3392 input_dynamic_offsets = end_offset;
3393 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
3394 } else {
3395 last_bound.per_set[set_idx].dynamicOffsets.clear();
3396 }
3397 if (!descriptor_set->IsPushDescriptor()) {
3398 // Can't cache validation of push_descriptors
3399 cb_state->validated_descriptor_sets.insert(descriptor_set);
3400 }
3401 }
3402 }
3403}
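// In short: a per_set[] entry keeps its bound_descriptor_set only while its canonical compat_id_for_set still matches
// the incoming pipeline layout. Binding with an incompatible layout therefore clears lower-numbered sets whose IDs
// changed and shrinks away the entries past the highest set being bound when those were disturbed, mirroring the
// spec's "Pipeline Layout Compatibility" invalidation rules.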
3404
3405// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3406void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3407 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3408 uint32_t firstSet, uint32_t setCount,
3409 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3410 const uint32_t *pDynamicOffsets) {
3411 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3412 auto pipeline_layout = GetPipelineLayout(layout);
3413
3414 // Resize binding arrays
3415 uint32_t last_set_index = firstSet + setCount - 1;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003416 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3417 if (last_set_index >= cb_state->lastBound[lv_bind_point].per_set.size()) {
3418 cb_state->lastBound[lv_bind_point].per_set.resize(last_set_index + 1);
locke-lunargd556cc32019-09-17 01:21:23 -06003419 }
3420
3421 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3422 dynamicOffsetCount, pDynamicOffsets);
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003423 cb_state->lastBound[lv_bind_point].pipeline_layout = layout;
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06003424 cb_state->lastBound[lv_bind_point].UpdateSamplerDescriptorsUsedByImage();
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003425}
3426
locke-lunargd556cc32019-09-17 01:21:23 -06003427void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
3428 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
3429 const VkWriteDescriptorSet *pDescriptorWrites) {
3430 const auto &pipeline_layout = GetPipelineLayout(layout);
3431 // Short circuit invalid updates
3432 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003433 !pipeline_layout->set_layouts[set]->IsPushDescriptor()) {
locke-lunargd556cc32019-09-17 01:21:23 -06003434 return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07003435 }
locke-lunargd556cc32019-09-17 01:21:23 -06003436
3437 // We need a descriptor set to update the bindings with, compatible with the passed layout
Jeremy Gebben50fb1832021-03-19 09:10:13 -06003438 const auto& dsl = pipeline_layout->set_layouts[set];
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003439 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3440 auto &last_bound = cb_state->lastBound[lv_bind_point];
locke-lunargd556cc32019-09-17 01:21:23 -06003441 auto &push_descriptor_set = last_bound.push_descriptor_set;
3442 // If we are disturbing the current push_desriptor_set clear it
3443 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003444 last_bound.UnbindAndResetPushDescriptorSet(cb_state, new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
locke-lunargd556cc32019-09-17 01:21:23 -06003445 }
3446
3447 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
3448 nullptr);
3449 last_bound.pipeline_layout = layout;
3450
 3451 // Now that we have either the new or the existing push descriptor set, do the write updates against it
Jeff Bolz41a1ced2019-10-11 11:40:49 -05003452 push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
locke-lunargd556cc32019-09-17 01:21:23 -06003453}
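// Push descriptors have no application-visible VkDescriptorSet; the tracker models them with a transient
// cvdescriptorset::DescriptorSet owned by last_bound.push_descriptor_set. That set is rebuilt whenever an
// incompatible layout disturbs it (see UnbindAndResetPushDescriptorSet above) and then updated in place with the
// pushed writes.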
3454
3455void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3456 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3457 uint32_t set, uint32_t descriptorWriteCount,
3458 const VkWriteDescriptorSet *pDescriptorWrites) {
3459 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3460 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3461}
3462
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003463void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3464 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3465 const void *pValues) {
3466 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3467 if (cb_state != nullptr) {
3468 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3469
3470 auto &push_constant_data = cb_state->push_constant_data;
3471 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3472 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06003473 cb_state->push_constant_pipeline_layout_set = layout;
3474
3475 auto flags = stageFlags;
3476 uint32_t bit_shift = 0;
3477 while (flags) {
3478 if (flags & 1) {
3479 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
3480 const auto it = cb_state->push_constant_data_update.find(flag);
3481
3482 if (it != cb_state->push_constant_data_update.end()) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06003483 std::memset(it->second.data() + offset, PC_Byte_Updated, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06003484 }
3485 }
3486 flags = flags >> 1;
3487 ++bit_shift;
3488 }
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003489 }
3490}
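// Example of the per-stage tracking above: vkCmdPushConstants with stageFlags == (VK_SHADER_STAGE_VERTEX_BIT |
// VK_SHADER_STAGE_FRAGMENT_BIT), offset 8 and size 16 copies the values into push_constant_data[8..23] and marks
// that same byte range as PC_Byte_Updated in the update maps for both the vertex and the fragment stage, one stage
// bit at a time via the bit_shift loop.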
3491
locke-lunargd556cc32019-09-17 01:21:23 -06003492void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3493 VkIndexType indexType) {
locke-lunargd556cc32019-09-17 01:21:23 -06003494 auto cb_state = GetCBState(commandBuffer);
3495
3496 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
Piers Daniell39842ee2020-07-10 16:42:33 -06003497 cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
locke-lunarg1ae57d62020-11-18 10:49:19 -07003498 cb_state->index_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(buffer);
3499 cb_state->index_buffer_binding.size = cb_state->index_buffer_binding.buffer_state->createInfo.size;
locke-lunargd556cc32019-09-17 01:21:23 -06003500 cb_state->index_buffer_binding.offset = offset;
3501 cb_state->index_buffer_binding.index_type = indexType;
 3502 // Add binding for this index buffer to this command buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003503 if (!disabled[command_buffer_state]) {
3504 cb_state->AddChild(cb_state->index_buffer_binding.buffer_state.get());
3505 }
locke-lunargd556cc32019-09-17 01:21:23 -06003506}
3507
3508void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3509 uint32_t bindingCount, const VkBuffer *pBuffers,
3510 const VkDeviceSize *pOffsets) {
3511 auto cb_state = GetCBState(commandBuffer);
3512
3513 uint32_t end = firstBinding + bindingCount;
3514 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3515 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3516 }
3517
3518 for (uint32_t i = 0; i < bindingCount; ++i) {
3519 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07003520 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003521 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06003522 vertex_buffer_binding.size = VK_WHOLE_SIZE;
3523 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06003524 // Add binding for this vertex buffer to this command buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003525 if (pBuffers[i] && !disabled[command_buffer_state]) {
3526 cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
Jeff Bolz165818a2020-05-08 11:19:03 -05003527 }
locke-lunargd556cc32019-09-17 01:21:23 -06003528 }
3529}
3530
3531void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3532 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003533 if (disabled[command_buffer_state]) return;
3534
locke-lunargd556cc32019-09-17 01:21:23 -06003535 auto cb_state = GetCBState(commandBuffer);
3536 auto dst_buffer_state = GetBufferState(dstBuffer);
3537
3538 // Update bindings between buffer and cmd buffer
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003539 if (cb_state && dst_buffer_state) {
3540 cb_state->AddChild(dst_buffer_state);
3541 }
locke-lunargd556cc32019-09-17 01:21:23 -06003542}
3543
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06003544static bool SetEventStageMask(VkEvent event, VkPipelineStageFlags2KHR stageMask,
Jeff Bolz310775c2019-10-09 00:46:33 -05003545 EventToStageMap *localEventToStageMap) {
3546 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06003547 return false;
3548}
3549
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003550void ValidationStateTracker::RecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06003551 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003552 if (!disabled[command_buffer_state]) {
3553 auto event_state = GetEventState(event);
3554 if (event_state) {
3555 cb_state->AddChild(event_state);
3556 }
locke-lunargd556cc32019-09-17 01:21:23 -06003557 }
3558 cb_state->events.push_back(event);
3559 if (!cb_state->waitedEvents.count(event)) {
3560 cb_state->writeEventsBeforeWait.push_back(event);
3561 }
Jeff Bolz310775c2019-10-09 00:46:33 -05003562 cb_state->eventUpdates.emplace_back(
3563 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
3564 return SetEventStageMask(event, stageMask, localEventToStageMap);
3565 });
locke-lunargd556cc32019-09-17 01:21:23 -06003566}
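// Note: the event's stage mask is not resolved here. The lambda pushed onto cb_state->eventUpdates is a deferred
// update that writes into a localEventToStageMap when the recorded command buffer is later replayed for validation
// (e.g. around queue submission), so events set and waited on within the same command buffer can be matched correctly.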
3567
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003568void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3569 VkPipelineStageFlags stageMask) {
3570 RecordCmdSetEvent(commandBuffer, event, stageMask);
3571}
3572
3573void ValidationStateTracker::PreCallRecordCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
3574 const VkDependencyInfoKHR *pDependencyInfo) {
3575 auto stage_masks = sync_utils::GetGlobalStageMasks(*pDependencyInfo);
3576
3577 RecordCmdSetEvent(commandBuffer, event, stage_masks.src);
Jeremy Gebben79649152021-06-22 14:46:24 -06003578
3579 RecordBarriers(commandBuffer, pDependencyInfo);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003580}
3581
3582void ValidationStateTracker::RecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3583 VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06003584 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003585 if (!disabled[command_buffer_state]) {
3586 auto event_state = GetEventState(event);
3587 if (event_state) {
3588 cb_state->AddChild(event_state);
3589 }
locke-lunargd556cc32019-09-17 01:21:23 -06003590 }
3591 cb_state->events.push_back(event);
3592 if (!cb_state->waitedEvents.count(event)) {
3593 cb_state->writeEventsBeforeWait.push_back(event);
3594 }
3595
3596 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003597 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003598 return SetEventStageMask(event, VkPipelineStageFlags2KHR(0), localEventToStageMap);
Jeff Bolz310775c2019-10-09 00:46:33 -05003599 });
locke-lunargd556cc32019-09-17 01:21:23 -06003600}
3601
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003602void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3603 VkPipelineStageFlags stageMask) {
3604 RecordCmdResetEvent(commandBuffer, event, stageMask);
3605}
3606
3607void ValidationStateTracker::PreCallRecordCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
3608 VkPipelineStageFlags2KHR stageMask) {
3609 RecordCmdResetEvent(commandBuffer, event, stageMask);
3610}
3611
3612void ValidationStateTracker::RecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents) {
locke-lunargd556cc32019-09-17 01:21:23 -06003613 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3614 for (uint32_t i = 0; i < eventCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003615 if (!disabled[command_buffer_state]) {
3616 auto event_state = GetEventState(pEvents[i]);
3617 if (event_state) {
3618 cb_state->AddChild(event_state);
3619 }
locke-lunargd556cc32019-09-17 01:21:23 -06003620 }
3621 cb_state->waitedEvents.insert(pEvents[i]);
3622 cb_state->events.push_back(pEvents[i]);
3623 }
3624}
3625
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003626void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
3627 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
3628 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3629 uint32_t bufferMemoryBarrierCount,
3630 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3631 uint32_t imageMemoryBarrierCount,
3632 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3633 RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
Jeremy Gebben79649152021-06-22 14:46:24 -06003634 RecordBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers,
3635 imageMemoryBarrierCount, pImageMemoryBarriers);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003636}
3637
3638void ValidationStateTracker::PreCallRecordCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount,
3639 const VkEvent *pEvents, const VkDependencyInfoKHR *pDependencyInfos) {
3640 RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
Jeremy Gebben79649152021-06-22 14:46:24 -06003641 for (uint32_t i = 0; i < eventCount; i++) {
3642 RecordBarriers(commandBuffer, &pDependencyInfos[i]);
3643 }
3644}
3645
3646void ValidationStateTracker::PostCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
3647 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
3648 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3649 uint32_t bufferMemoryBarrierCount,
3650 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3651 uint32_t imageMemoryBarrierCount,
3652 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3653 RecordBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers,
3654 imageMemoryBarrierCount, pImageMemoryBarriers);
3655}
3656
3657void ValidationStateTracker::PreCallRecordCmdPipelineBarrier2KHR(VkCommandBuffer commandBuffer,
3658 const VkDependencyInfoKHR *pDependencyInfo) {
3659 RecordBarriers(commandBuffer, pDependencyInfo);
3660}
3661
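// Link every buffer and image referenced by the barriers to the command buffer as child state, unless
// command-buffer state tracking is disabled.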
3662void ValidationStateTracker::RecordBarriers(VkCommandBuffer commandBuffer, uint32_t memoryBarrierCount,
3663 const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
3664 const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
3665 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3666 if (disabled[command_buffer_state]) return;
3667
3668 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3669 for (uint32_t i = 0; i < bufferMemoryBarrierCount; i++) {
3670 auto buffer_state = GetBufferState(pBufferMemoryBarriers[i].buffer);
3671 if (buffer_state) {
3672 cb_state->AddChild(buffer_state);
3673 }
3674 }
3675 for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
3676 auto image_state = GetImageState(pImageMemoryBarriers[i].image);
3677 if (image_state) {
3678 cb_state->AddChild(image_state);
3679 }
3680 }
3681}
3682
3683void ValidationStateTracker::RecordBarriers(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR *pDependencyInfo) {
3684 if (disabled[command_buffer_state]) return;
3685
3686 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3687 for (uint32_t i = 0; i < pDependencyInfo->bufferMemoryBarrierCount; i++) {
3688 auto buffer_state = GetBufferState(pDependencyInfo->pBufferMemoryBarriers[i].buffer);
3689 if (buffer_state) {
3690 cb_state->AddChild(buffer_state);
3691 }
3692 }
3693 for (uint32_t i = 0; i < pDependencyInfo->imageMemoryBarrierCount; i++) {
3694 auto image_state = GetImageState(pDependencyInfo->pImageMemoryBarriers[i].image);
3695 if (image_state) {
3696 cb_state->AddChild(image_state);
3697 }
3698 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003699}
3700
Jeff Bolz310775c2019-10-09 00:46:33 -05003701bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
3702 (*localQueryToStateMap)[object] = value;
3703 return false;
3704}
3705
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003706bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
3707 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05003708 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003709 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05003710 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06003711 }
3712 return false;
3713}
3714
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003715QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
3716 uint32_t perfPass) const {
3717 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06003718
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003719 auto iter = localQueryToStateMap->find(query);
3720 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05003721
Jeff Bolz310775c2019-10-09 00:46:33 -05003722 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06003723}
3724
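// Begin-query bookkeeping: add the query to the active and started sets and queue a deferred update that
// marks it QUERYSTATE_RUNNING.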
3725void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003726 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003727 cb_state->activeQueries.insert(query_obj);
3728 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003729 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3730 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3731 QueryMap *localQueryToStateMap) {
3732 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
3733 return false;
3734 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003735 if (!disabled[command_buffer_state]) {
3736 auto pool_state = GetQueryPoolState(query_obj.pool);
3737 cb_state->AddChild(pool_state);
3738 }
locke-lunargd556cc32019-09-17 01:21:23 -06003739}
3740
3741void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3742 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003743 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003744 QueryObject query = {queryPool, slot};
3745 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3746 RecordCmdBeginQuery(cb_state, query);
3747}
3748
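// End-query mirrors begin: drop the query from the active set and queue a deferred transition to QUERYSTATE_ENDED.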
3749void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003750 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003751 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003752 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3753 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3754 QueryMap *localQueryToStateMap) {
3755 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3756 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003757 if (!disabled[command_buffer_state]) {
3758 auto pool_state = GetQueryPoolState(query_obj.pool);
3759 cb_state->AddChild(pool_state);
3760 }
locke-lunargd556cc32019-09-17 01:21:23 -06003761}
3762
3763void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003764 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003765 QueryObject query_obj = {queryPool, slot};
3766 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3767 RecordCmdEndQuery(cb_state, query_obj);
3768}
3769
3770void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3771 uint32_t firstQuery, uint32_t queryCount) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003772 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003773 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3774
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02003775 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
3776 QueryObject query = {queryPool, slot};
3777 cb_state->resetQueries.insert(query);
3778 }
3779
Jeff Bolz310775c2019-10-09 00:46:33 -05003780 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003781 bool do_validate, VkQueryPool &firstPerfQueryPool,
3782 uint32_t perfQueryPass,
3783 QueryMap *localQueryToStateMap) {
3784 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06003785 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003786 if (!disabled[command_buffer_state]) {
3787 auto pool_state = GetQueryPoolState(queryPool);
3788 cb_state->AddChild(pool_state);
3789 }
locke-lunargd556cc32019-09-17 01:21:23 -06003790}
3791
3792void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3793 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3794 VkDeviceSize dstOffset, VkDeviceSize stride,
3795 VkQueryResultFlags flags) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003796 if (disabled[query_validation] || disabled[command_buffer_state]) return;
3797
locke-lunargd556cc32019-09-17 01:21:23 -06003798 auto cb_state = GetCBState(commandBuffer);
3799 auto dst_buff_state = GetBufferState(dstBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003800 cb_state->AddChild(dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003801 auto pool_state = GetQueryPoolState(queryPool);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003802 cb_state->AddChild(pool_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003803}
3804
3805void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3806 VkQueryPool queryPool, uint32_t slot) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003807 PostCallRecordCmdWriteTimestamp2KHR(commandBuffer, pipelineStage, queryPool, slot);
3808}
3809
3810void ValidationStateTracker::PostCallRecordCmdWriteTimestamp2KHR(VkCommandBuffer commandBuffer,
3811 VkPipelineStageFlags2KHR pipelineStage, VkQueryPool queryPool,
3812 uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003813 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003814 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003815 if (!disabled[command_buffer_state]) {
3816 auto pool_state = GetQueryPoolState(queryPool);
3817 cb_state->AddChild(pool_state);
3818 }
locke-lunargd556cc32019-09-17 01:21:23 -06003819 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003820 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
3821 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3822 QueryMap *localQueryToStateMap) {
3823 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3824 });
locke-lunargd556cc32019-09-17 01:21:23 -06003825}
3826
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003827void ValidationStateTracker::PostCallRecordCmdWriteAccelerationStructuresPropertiesKHR(
3828 VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR *pAccelerationStructures,
3829 VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {
3830 if (disabled[query_validation]) return;
3831 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003832 if (!disabled[command_buffer_state]) {
3833 auto pool_state = GetQueryPoolState(queryPool);
3834 cb_state->AddChild(pool_state);
3835 }
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003836 cb_state->queryUpdates.emplace_back(
3837 [queryPool, firstQuery, accelerationStructureCount](const ValidationStateTracker *device_data, bool do_validate,
3838 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3839 QueryMap *localQueryToStateMap) {
3840 return SetQueryStateMulti(queryPool, firstQuery, accelerationStructureCount, perfQueryPass, QUERYSTATE_ENDED,
3841 localQueryToStateMap);
3842 });
3843}
3844
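// On successful framebuffer creation, capture shared references to the attachment image views (skipped for
// imageless framebuffers) and store the new FRAMEBUFFER_STATE.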
locke-lunargd556cc32019-09-17 01:21:23 -06003845void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3846 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3847 VkResult result) {
3848 if (VK_SUCCESS != result) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003849
Jeremy Gebben88f58142021-06-01 10:07:52 -06003850 std::vector<std::shared_ptr<IMAGE_VIEW_STATE>> views;
Mike Schuchardt2df08912020-12-15 16:28:09 -08003851 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) == 0) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003852 views.resize(pCreateInfo->attachmentCount);
locke-lunarg1ae57d62020-11-18 10:49:19 -07003853
locke-lunargd556cc32019-09-17 01:21:23 -06003854 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003855 views[i] = GetShared<IMAGE_VIEW_STATE>(pCreateInfo->pAttachments[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003856 }
3857 }
Jeremy Gebben88f58142021-06-01 10:07:52 -06003858
3859 frameBufferMap[*pFramebuffer] = std::make_shared<FRAMEBUFFER_STATE>(
3860 *pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass), std::move(views));
locke-lunargd556cc32019-09-17 01:21:23 -06003861}
3862
locke-lunargd556cc32019-09-17 01:21:23 -06003863void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
3864 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3865 VkResult result) {
3866 if (VK_SUCCESS != result) return;
Jeremy Gebben88f58142021-06-01 10:07:52 -06003867 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003868}
3869
Mike Schuchardt2df08912020-12-15 16:28:09 -08003870void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07003871 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3872 VkResult result) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003873 if (VK_SUCCESS != result) return;
3874
3875 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
Tony-LunarG977448c2019-12-02 14:52:02 -07003876}
3877
Mike Schuchardt2df08912020-12-15 16:28:09 -08003878void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07003879 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3880 VkResult result) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003881 if (VK_SUCCESS != result) return;
3882
3883 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
Tony-LunarG977448c2019-12-02 14:52:02 -07003884}
3885
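// Beginning a render pass caches the active render pass, framebuffer, subpass index, and contents on the
// command buffer, resolves the device-group render-pass mask, rebuilds the per-subpass attachment
// bookkeeping, and links the render pass and framebuffer as children of the command buffer.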
locke-lunargd556cc32019-09-17 01:21:23 -06003886void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
3887 const VkRenderPassBeginInfo *pRenderPassBegin,
3888 const VkSubpassContents contents) {
3889 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunargaecf2152020-05-12 17:15:41 -06003890 auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
3891 auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06003892
3893 if (render_pass_state) {
locke-lunargaecf2152020-05-12 17:15:41 -06003894 cb_state->activeFramebuffer = framebuffer;
locke-lunargd556cc32019-09-17 01:21:23 -06003895 cb_state->activeRenderPass = render_pass_state;
Tony-LunarG61e7c0c2020-03-03 16:09:11 -07003896 cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
locke-lunargd556cc32019-09-17 01:21:23 -06003897 cb_state->activeSubpass = 0;
3898 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07003899
locke-lunargd556cc32019-09-17 01:21:23 -06003900 // Connect this RP to cmdBuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003901 if (!disabled[command_buffer_state]) {
3902 cb_state->AddChild(render_pass_state.get());
3903 }
locke-lunargd556cc32019-09-17 01:21:23 -06003904
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07003905 auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06003906 if (chained_device_group_struct) {
3907 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
3908 } else {
3909 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
3910 }
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003911
locke-lunargfc78e932020-11-19 17:06:24 -07003912 cb_state->active_subpasses = nullptr;
3913 cb_state->active_attachments = nullptr;
3914
3915 if (framebuffer) {
3916 cb_state->framebuffers.insert(framebuffer);
3917
3918 // Set cb_state->active_subpasses
3919 cb_state->active_subpasses =
3920 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
3921 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
3922 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
3923
3924 // Set cb_state->active_attachments & cb_state->attachments_view_states
3925 cb_state->active_attachments =
3926 std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(framebuffer->createInfo.attachmentCount);
3927 UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, pRenderPassBegin);
3928
3929 // Connect this framebuffer and its children to this cmdBuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003930 cb_state->AddChild(framebuffer.get());
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003931 }
locke-lunargd556cc32019-09-17 01:21:23 -06003932 }
3933}
3934
3935void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
3936 const VkRenderPassBeginInfo *pRenderPassBegin,
3937 VkSubpassContents contents) {
3938 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
3939}
3940
3941void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
3942 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003943 const VkSubpassBeginInfo *pSubpassBeginInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06003944 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
3945}
3946
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06003947void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
3948 uint32_t counterBufferCount,
3949 const VkBuffer *pCounterBuffers,
3950 const VkDeviceSize *pCounterBufferOffsets) {
3951 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3952
3953 cb_state->transform_feedback_active = true;
3954}
3955
3956void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
3957 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
3958 const VkDeviceSize *pCounterBufferOffsets) {
3959 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3960
3961 cb_state->transform_feedback_active = false;
3962}
3963
Tony-LunarG977448c2019-12-02 14:52:02 -07003964void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
3965 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003966 const VkSubpassBeginInfo *pSubpassBeginInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07003967 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
3968}
3969
locke-lunargd556cc32019-09-17 01:21:23 -06003970void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3971 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3972 cb_state->activeSubpass++;
3973 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07003974
3975 // Update cb_state->active_subpasses
3976 if (cb_state->activeRenderPass && cb_state->activeFramebuffer) {
3977 cb_state->active_subpasses = nullptr;
3978 cb_state->active_subpasses =
3979 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
3980
3981 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
3982 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
3983 }
locke-lunargd556cc32019-09-17 01:21:23 -06003984}
3985
3986void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3987 RecordCmdNextSubpass(commandBuffer, contents);
3988}
3989
3990void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003991 const VkSubpassBeginInfo *pSubpassBeginInfo,
3992 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06003993 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
3994}
3995
Tony-LunarG977448c2019-12-02 14:52:02 -07003996void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08003997 const VkSubpassBeginInfo *pSubpassBeginInfo,
3998 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07003999 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4000}
4001
locke-lunargd556cc32019-09-17 01:21:23 -06004002void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
4003 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4004 cb_state->activeRenderPass = nullptr;
locke-lunargfc78e932020-11-19 17:06:24 -07004005 cb_state->active_attachments = nullptr;
4006 cb_state->active_subpasses = nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004007 cb_state->activeSubpass = 0;
4008 cb_state->activeFramebuffer = VK_NULL_HANDLE;
4009}
4010
4011void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
4012 RecordCmdEndRenderPassState(commandBuffer);
4013}
4014
4015void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004016 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06004017 RecordCmdEndRenderPassState(commandBuffer);
4018}
4019
Tony-LunarG977448c2019-12-02 14:52:02 -07004020void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004021 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004022 RecordCmdEndRenderPassState(commandBuffer);
4023}
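
// Recording vkCmdExecuteCommands: propagate image layout maps and deferred query/submit lambdas from each
// secondary command buffer into the primary, link the secondaries as children, and treat dynamic
// viewport/scissor state as trashed afterwards.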
locke-lunargd556cc32019-09-17 01:21:23 -06004024void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
4025 const VkCommandBuffer *pCommandBuffers) {
4026 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4027
4028 CMD_BUFFER_STATE *sub_cb_state = NULL;
4029 for (uint32_t i = 0; i < commandBuffersCount; i++) {
4030 sub_cb_state = GetCBState(pCommandBuffers[i]);
4031 assert(sub_cb_state);
4032 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
4033 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
4034 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
4035 // from the validation step to the recording step
4036 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
4037 }
4038 }
4039
 4040 // Propagate initial layout and current layout state to the primary cmd buffer
4041 // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
 4042 // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
4043 // for those other classes.
4044 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
4045 const auto image = sub_layout_map_entry.first;
4046 const auto *image_state = GetImageState(image);
4047 if (!image_state) continue; // Can't set layouts of a dead image
4048
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06004049 auto *cb_subres_map = cb_state->GetImageSubresourceLayoutMap(*image_state);
John Zulauf17708d02021-02-22 11:20:58 -07004050 const auto *sub_cb_subres_map = &sub_layout_map_entry.second;
locke-lunargd556cc32019-09-17 01:21:23 -06004051 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
4052 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
4053 }
4054
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06004055 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer();
locke-lunargd556cc32019-09-17 01:21:23 -06004056 cb_state->linkedCommandBuffers.insert(sub_cb_state);
Jeremy Gebben5570abe2021-05-16 18:35:13 -06004057 cb_state->AddChild(sub_cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004058 for (auto &function : sub_cb_state->queryUpdates) {
4059 cb_state->queryUpdates.push_back(function);
4060 }
4061 for (auto &function : sub_cb_state->queue_submit_functions) {
4062 cb_state->queue_submit_functions.push_back(function);
4063 }
David Zhao Akeley44139b12021-04-26 16:16:13 -07004064
4065 // State is trashed after executing secondary command buffers.
4066 // Importantly, this function runs after CoreChecks::PreCallValidateCmdExecuteCommands.
4067 cb_state->trashedViewportMask = ~uint32_t(0);
4068 cb_state->trashedScissorMask = ~uint32_t(0);
4069 cb_state->trashedViewportCount = true;
4070 cb_state->trashedScissorCount = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004071 }
4072}
4073
4074void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
4075 VkFlags flags, void **ppData, VkResult result) {
4076 if (VK_SUCCESS != result) return;
4077 RecordMappedMemory(mem, offset, size, ppData);
4078}
4079
4080void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4081 auto mem_info = GetDevMemState(mem);
4082 if (mem_info) {
4083 mem_info->mapped_range = MemRange();
4084 mem_info->p_driver_data = nullptr;
4085 }
4086}
4087
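// Binding image memory: images bound through a swapchain receive a fake base address and are aliased against
// other images bound to the same swapchain slot; otherwise the image is bound to the VkDeviceMemory range
// and, for VK_IMAGE_CREATE_ALIAS_BIT images, aliased against other images bound to that memory.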
4088void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
Jeremy Gebben8ee02af2021-07-16 10:15:55 -06004089 auto image_state = GetShared<IMAGE_STATE>(bindInfo.image);
locke-lunargd556cc32019-09-17 01:21:23 -06004090 if (image_state) {
locke-lunargae26eac2020-04-16 15:29:05 -06004091 // An Android special image cannot get a VkSubresourceLayout until the image is bound to memory.
4092 // See: VUID-vkGetImageSubresourceLayout-image-01895
4093 image_state->fragment_encoder =
4094 std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07004095 const auto swapchain_info = LvlFindInChain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06004096 if (swapchain_info) {
John Zulauf29d00532021-03-04 13:28:54 -07004097 auto *swapchain = GetSwapchainState(swapchain_info->swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004098 if (swapchain) {
John Zulaufd13b38e2021-03-05 08:17:38 -07004099 SWAPCHAIN_IMAGE &swap_image = swapchain->images[swapchain_info->imageIndex];
John Zulauf29d00532021-03-04 13:28:54 -07004100 if (swap_image.bound_images.empty()) {
4101 // If this is the first "binding" of an image to this swapchain index, get a fake allocation
4102 image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
4103 } else {
Jeremy Gebben8ee02af2021-07-16 10:15:55 -06004104 image_state->swapchain_fake_address = (*swap_image.bound_images.cbegin()).second->swapchain_fake_address;
John Zulauf29d00532021-03-04 13:28:54 -07004105 }
Jeremy Gebben8ee02af2021-07-16 10:15:55 -06004106 swap_image.bound_images.emplace(bindInfo.image, image_state);
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004107 image_state->AddParent(swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004108 image_state->bind_swapchain = swapchain_info->swapchain;
4109 image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
John Zulaufd13b38e2021-03-05 08:17:38 -07004110
John Zulauf29d00532021-03-04 13:28:54 -07004111 // All images bound to this swapchain and index are aliases
Jeremy Gebben8ee02af2021-07-16 10:15:55 -06004112 for (auto &entry: swap_image.bound_images) {
4113 image_state->AddAliasingImage(entry.second.get());
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06004114 }
locke-lunargd556cc32019-09-17 01:21:23 -06004115 }
4116 } else {
4117 // Track bound memory range information
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004118 auto mem_info = GetDevMemShared(bindInfo.memory);
locke-lunargd556cc32019-09-17 01:21:23 -06004119 if (mem_info) {
John Zulaufd13b38e2021-03-05 08:17:38 -07004120 if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06004121 for (auto *base_node : mem_info->ObjectBindings()) {
4122 if (base_node->Handle().type == kVulkanObjectTypeImage) {
4123 auto other_image = static_cast<IMAGE_STATE *>(base_node);
4124 image_state->AddAliasingImage(other_image);
4125 }
4126 }
John Zulaufd13b38e2021-03-05 08:17:38 -07004127 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004128 // Track objects tied to memory
4129 image_state->SetMemBinding(mem_info, bindInfo.memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06004130 }
locke-lunargd556cc32019-09-17 01:21:23 -06004131 }
locke-lunargd556cc32019-09-17 01:21:23 -06004132 }
4133}
4134
4135void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
4136 VkDeviceSize memoryOffset, VkResult result) {
4137 if (VK_SUCCESS != result) return;
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004138 auto bind_info = LvlInitStruct<VkBindImageMemoryInfo>();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004139 bind_info.image = image;
4140 bind_info.memory = mem;
4141 bind_info.memoryOffset = memoryOffset;
4142 UpdateBindImageMemoryState(bind_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004143}
4144
4145void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004146 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004147 if (VK_SUCCESS != result) return;
4148 for (uint32_t i = 0; i < bindInfoCount; i++) {
4149 UpdateBindImageMemoryState(pBindInfos[i]);
4150 }
4151}
4152
4153void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004154 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004155 if (VK_SUCCESS != result) return;
4156 for (uint32_t i = 0; i < bindInfoCount; i++) {
4157 UpdateBindImageMemoryState(pBindInfos[i]);
4158 }
4159}
4160
4161void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
4162 auto event_state = GetEventState(event);
4163 if (event_state) {
4164 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
4165 }
locke-lunargd556cc32019-09-17 01:21:23 -06004166}
4167
4168void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
4169 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
4170 VkResult result) {
4171 if (VK_SUCCESS != result) return;
4172 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
4173 pImportSemaphoreFdInfo->flags);
4174}
4175
4176void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004177 VkExternalSemaphoreHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004178 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004179 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004180 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
4181 semaphore_state->scope = kSyncScopeExternalPermanent;
4182 }
4183}
4184
4185#ifdef VK_USE_PLATFORM_WIN32_KHR
4186void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
4187 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
4188 if (VK_SUCCESS != result) return;
4189 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
4190 pImportSemaphoreWin32HandleInfo->flags);
4191}
4192
4193void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
4194 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4195 HANDLE *pHandle, VkResult result) {
4196 if (VK_SUCCESS != result) return;
4197 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
4198}
4199
4200void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
4201 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
4202 if (VK_SUCCESS != result) return;
4203 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
4204 pImportFenceWin32HandleInfo->flags);
4205}
4206
4207void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
4208 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4209 HANDLE *pHandle, VkResult result) {
4210 if (VK_SUCCESS != result) return;
4211 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
4212}
4213#endif
4214
4215void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
4216 VkResult result) {
4217 if (VK_SUCCESS != result) return;
4218 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
4219}
4220
Mike Schuchardt2df08912020-12-15 16:28:09 -08004221void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type,
4222 VkFenceImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06004223 FENCE_STATE *fence_node = GetFenceState(fence);
4224 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004225 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_FENCE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06004226 fence_node->scope == kSyncScopeInternal) {
4227 fence_node->scope = kSyncScopeExternalTemporary;
4228 } else {
4229 fence_node->scope = kSyncScopeExternalPermanent;
4230 }
4231 }
4232}
4233
4234void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
4235 VkResult result) {
4236 if (VK_SUCCESS != result) return;
4237 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
4238}
4239
Mike Schuchardt2df08912020-12-15 16:28:09 -08004240void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004241 FENCE_STATE *fence_state = GetFenceState(fence);
4242 if (fence_state) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004243 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004244 // Export with reference transference becomes external
4245 fence_state->scope = kSyncScopeExternalPermanent;
4246 } else if (fence_state->scope == kSyncScopeInternal) {
4247 // Export with copy transference has a side effect of resetting the fence
4248 fence_state->state = FENCE_UNSIGNALED;
4249 }
4250 }
4251}
4252
4253void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
4254 VkResult result) {
4255 if (VK_SUCCESS != result) return;
4256 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
4257}
4258
4259void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
4260 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
4261 if (VK_SUCCESS != result) return;
John Zulaufd5115702021-01-18 12:34:33 -07004262 const auto event = *pEvent;
Jeremy Gebbencbf22862021-03-03 12:01:22 -07004263 eventMap.emplace(event, std::make_shared<EVENT_STATE>(event, pCreateInfo->flags));
locke-lunargd556cc32019-09-17 01:21:23 -06004264}
4265
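// Successful swapchain creation replaces the surface's current swapchain with a new state object linked to
// the surface; failure clears the surface's swapchain pointer.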
4266void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
4267 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
4268 SWAPCHAIN_NODE *old_swapchain_state) {
4269 if (VK_SUCCESS == result) {
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004270 if (surface_state->swapchain) {
4271 surface_state->swapchain->RemoveParent(surface_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004272 }
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004273 surface_state->swapchain = CreateSwapchainState(pCreateInfo, *pSwapchain);
4274 surface_state->swapchain->AddParent(surface_state);
4275 swapchainMap[*pSwapchain] = surface_state->swapchain;
locke-lunargd556cc32019-09-17 01:21:23 -06004276 } else {
4277 surface_state->swapchain = nullptr;
4278 }
4279 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
4280 if (old_swapchain_state) {
4281 old_swapchain_state->retired = true;
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004282 old_swapchain_state->RemoveParent(surface_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004283 }
4284 return;
4285}
4286
4287void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
4288 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
4289 VkResult result) {
4290 auto surface_state = GetSurfaceState(pCreateInfo->surface);
4291 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
4292 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
4293}
4294
4295void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
4296 const VkAllocationCallbacks *pAllocator) {
4297 if (!swapchain) return;
4298 auto swapchain_data = GetSwapchainState(swapchain);
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004299 if (!swapchain_data) return;
John Zulauffaa7a522021-03-05 12:22:45 -07004300
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004301 for (auto &swapchain_image : swapchain_data->images) {
Jeremy Gebben8ee02af2021-07-16 10:15:55 -06004302 for (auto &entry: swapchain_image.bound_images) {
4303 imageMap.erase(entry.second->image());
locke-lunargd556cc32019-09-17 01:21:23 -06004304 }
locke-lunargd556cc32019-09-17 01:21:23 -06004305 }
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004306
4307 swapchain_data->Destroy();
4308 swapchainMap.erase(swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004309}
4310
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004311void ValidationStateTracker::PostCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
4312 const VkDisplayModeCreateInfoKHR *pCreateInfo,
4313 const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode,
4314 VkResult result) {
4315 if (VK_SUCCESS != result) return;
4316 if (!pMode) return;
Jeremy Gebben5573a8c2021-06-04 08:55:10 -06004317 display_mode_map[*pMode] = std::make_shared<DISPLAY_MODE_STATE>(*pMode, physicalDevice);
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004318}
4319
locke-lunargd556cc32019-09-17 01:21:23 -06004320void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
4321 // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
4322 for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004323 auto semaphore_state = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
4324 if (semaphore_state) {
4325 semaphore_state->signaler.first = VK_NULL_HANDLE;
4326 semaphore_state->signaled = false;
locke-lunargd556cc32019-09-17 01:21:23 -06004327 }
4328 }
4329
4330 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
4331 // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
4332 // confused itself just as much.
4333 auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
4334 if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
4335 // Mark the image as having been released to the WSI
4336 auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004337 if (swapchain_data) {
4338 swapchain_data->PresentImage(pPresentInfo->pImageIndices[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06004339 }
4340 }
4341 // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
4342 // its semaphore waits) /never/ participate in any completion proof.
4343}
4344
4345void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
4346 const VkSwapchainCreateInfoKHR *pCreateInfos,
4347 const VkAllocationCallbacks *pAllocator,
4348 VkSwapchainKHR *pSwapchains, VkResult result) {
4349 if (pCreateInfos) {
4350 for (uint32_t i = 0; i < swapchainCount; i++) {
4351 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
4352 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
4353 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
4354 }
4355 }
4356}
4357
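// A successful acquire puts an internal-scope fence in flight, marks an internal-scope semaphore as signaled
// (neither participates in a completion proof), and marks the swapchain image as acquired.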
4358void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4359 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004360 auto fence_state = GetFenceState(fence);
4361 if (fence_state && fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004362 // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
4363 // import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004364 fence_state->state = FENCE_INFLIGHT;
4365 fence_state->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
locke-lunargd556cc32019-09-17 01:21:23 -06004366 }
4367
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004368 auto semaphore_state = GetSemaphoreState(semaphore);
4369 if (semaphore_state && semaphore_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004370 // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
4371 // temporary import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004372 semaphore_state->signaled = true;
4373 semaphore_state->signaler.first = VK_NULL_HANDLE;
locke-lunargd556cc32019-09-17 01:21:23 -06004374 }
4375
4376 // Mark the image as acquired.
4377 auto swapchain_data = GetSwapchainState(swapchain);
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06004378 if (swapchain_data) {
4379 swapchain_data->AcquireImage(*pImageIndex);
locke-lunargd556cc32019-09-17 01:21:23 -06004380 }
4381}
4382
4383void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4384 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
4385 VkResult result) {
4386 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4387 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
4388}
4389
4390void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
4391 uint32_t *pImageIndex, VkResult result) {
4392 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4393 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
4394 pAcquireInfo->fence, pImageIndex);
4395}
4396
4397void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
4398 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
4399 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
4400 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
4401 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
4402 phys_device_state.phys_device = pPhysicalDevices[i];
4403 // Init actual features for each physical device
4404 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
4405 }
4406 }
4407}
4408
4409// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
4410static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004411 VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004412 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
4413
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004414 if (pQueueFamilyProperties) { // Save queue family properties
locke-lunargd556cc32019-09-17 01:21:23 -06004415 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
4416 for (uint32_t i = 0; i < count; ++i) {
4417 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
4418 }
4419 }
4420}
4421
4422void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
4423 uint32_t *pQueueFamilyPropertyCount,
4424 VkQueueFamilyProperties *pQueueFamilyProperties) {
4425 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4426 assert(physical_device_state);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004427 VkQueueFamilyProperties2 *pqfp = nullptr;
4428 std::vector<VkQueueFamilyProperties2> qfp;
locke-lunargd556cc32019-09-17 01:21:23 -06004429 qfp.resize(*pQueueFamilyPropertyCount);
4430 if (pQueueFamilyProperties) {
4431 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004432 qfp[i] = LvlInitStruct<VkQueueFamilyProperties2>();
locke-lunargd556cc32019-09-17 01:21:23 -06004433 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
4434 }
4435 pqfp = qfp.data();
4436 }
4437 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
4438}
4439
4440void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004441 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004442 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4443 assert(physical_device_state);
4444 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4445 pQueueFamilyProperties);
4446}
4447
4448void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004449 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004450 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4451 assert(physical_device_state);
4452 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4453 pQueueFamilyProperties);
4454}
4455void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
4456 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004457 if (!surface) return;
4458 auto surface_state = GetSurfaceState(surface);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004459 surface_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06004460 surface_map.erase(surface);
4461}
4462
4463void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004464 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06004465}
4466
4467void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
4468 const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
4469 const VkAllocationCallbacks *pAllocator,
4470 VkSurfaceKHR *pSurface, VkResult result) {
4471 if (VK_SUCCESS != result) return;
4472 RecordVulkanSurface(pSurface);
4473}
4474
4475#ifdef VK_USE_PLATFORM_ANDROID_KHR
4476void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
4477 const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
4478 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4479 VkResult result) {
4480 if (VK_SUCCESS != result) return;
4481 RecordVulkanSurface(pSurface);
4482}
4483#endif // VK_USE_PLATFORM_ANDROID_KHR
4484
4485#ifdef VK_USE_PLATFORM_IOS_MVK
4486void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
4487 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4488 VkResult result) {
4489 if (VK_SUCCESS != result) return;
4490 RecordVulkanSurface(pSurface);
4491}
4492#endif // VK_USE_PLATFORM_IOS_MVK
4493
4494#ifdef VK_USE_PLATFORM_MACOS_MVK
4495void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
4496 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
4497 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4498 VkResult result) {
4499 if (VK_SUCCESS != result) return;
4500 RecordVulkanSurface(pSurface);
4501}
4502#endif // VK_USE_PLATFORM_MACOS_MVK
4503
Jeremy Kniagerf33a67c2019-12-09 09:44:39 -07004504#ifdef VK_USE_PLATFORM_METAL_EXT
4505void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
4506 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
4507 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4508 VkResult result) {
4509 if (VK_SUCCESS != result) return;
4510 RecordVulkanSurface(pSurface);
4511}
4512#endif // VK_USE_PLATFORM_METAL_EXT
4513
locke-lunargd556cc32019-09-17 01:21:23 -06004514#ifdef VK_USE_PLATFORM_WAYLAND_KHR
4515void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
4516 const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
4517 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4518 VkResult result) {
4519 if (VK_SUCCESS != result) return;
4520 RecordVulkanSurface(pSurface);
4521}
4522#endif // VK_USE_PLATFORM_WAYLAND_KHR
4523
4524#ifdef VK_USE_PLATFORM_WIN32_KHR
4525void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
4526 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
4527 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4528 VkResult result) {
4529 if (VK_SUCCESS != result) return;
4530 RecordVulkanSurface(pSurface);
4531}
4532#endif // VK_USE_PLATFORM_WIN32_KHR
4533
4534#ifdef VK_USE_PLATFORM_XCB_KHR
4535void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
4536 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4537 VkResult result) {
4538 if (VK_SUCCESS != result) return;
4539 RecordVulkanSurface(pSurface);
4540}
4541#endif // VK_USE_PLATFORM_XCB_KHR
4542
4543#ifdef VK_USE_PLATFORM_XLIB_KHR
4544void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
4545 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4546 VkResult result) {
4547 if (VK_SUCCESS != result) return;
4548 RecordVulkanSurface(pSurface);
4549}
4550#endif // VK_USE_PLATFORM_XLIB_KHR
4551
Niklas Haas8b84af12020-04-19 22:20:11 +02004552void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
4553 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
4554 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4555 VkResult result) {
4556 if (VK_SUCCESS != result) return;
4557 RecordVulkanSurface(pSurface);
4558}
4559
Cort23cf2282019-09-20 18:58:18 +02004560void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004561 VkPhysicalDeviceFeatures *pFeatures) {
4562 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Yilong Li358152a2020-07-08 02:16:45 -07004563 // Reset the features2 safe struct before setting up the features field.
4564 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02004565 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02004566}
4567
4568void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004569 VkPhysicalDeviceFeatures2 *pFeatures) {
4570 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02004571 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02004572}
4573
4574void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004575 VkPhysicalDeviceFeatures2 *pFeatures) {
4576 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02004577 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02004578}
4579
locke-lunargd556cc32019-09-17 01:21:23 -06004580void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
4581 VkSurfaceKHR surface,
4582 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
4583 VkResult result) {
4584 if (VK_SUCCESS != result) return;
4585 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004586 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004587
4588 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4589 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004590}
4591
4592void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
4593 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4594 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
4595 if (VK_SUCCESS != result) return;
4596 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004597 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004598
4599 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4600 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004601}
4602
4603void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
4604 VkSurfaceKHR surface,
4605 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
4606 VkResult result) {
4607 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004608 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
4609 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
4610 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
4611 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
4612 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
4613 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
4614 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
4615 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
4616 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
4617 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004618
4619 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4620 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004621}
4622
4623void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
4624 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
4625 VkBool32 *pSupported, VkResult result) {
4626 if (VK_SUCCESS != result) return;
4627 auto surface_state = GetSurfaceState(surface);
4628 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
4629}
4630
4631void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
4632 VkSurfaceKHR surface,
4633 uint32_t *pPresentModeCount,
4634 VkPresentModeKHR *pPresentModes,
4635 VkResult result) {
4636 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4637
4638 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
4639 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004640
4641 if (*pPresentModeCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004642 if (*pPresentModeCount > physical_device_state->present_modes.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004643 physical_device_state->present_modes.resize(*pPresentModeCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004644 }
locke-lunargd556cc32019-09-17 01:21:23 -06004645 }
4646 if (pPresentModes) {
locke-lunargd556cc32019-09-17 01:21:23 -06004647 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
4648 physical_device_state->present_modes[i] = pPresentModes[i];
4649 }
4650 }
4651}
4652
4653void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
4654 uint32_t *pSurfaceFormatCount,
4655 VkSurfaceFormatKHR *pSurfaceFormats,
4656 VkResult result) {
4657 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4658
4659 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004660
4661 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004662 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004663 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004664 }
locke-lunargd556cc32019-09-17 01:21:23 -06004665 }
4666 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004667 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
4668 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
4669 }
4670 }
4671}
4672
4673void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
4674 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4675 uint32_t *pSurfaceFormatCount,
4676 VkSurfaceFormat2KHR *pSurfaceFormats,
4677 VkResult result) {
4678 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4679
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004680 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004681 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004682 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
4683 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
4684 }
locke-lunargd556cc32019-09-17 01:21:23 -06004685 }
4686 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004687 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004688 physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
locke-lunargd556cc32019-09-17 01:21:23 -06004689 }
4690 }
4691}
4692
4693void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4694 const VkDebugUtilsLabelEXT *pLabelInfo) {
4695 BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4696}
4697
4698void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
4699 EndCmdDebugUtilsLabel(report_data, commandBuffer);
4700}
4701
4702void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4703 const VkDebugUtilsLabelEXT *pLabelInfo) {
4704 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4705
4706 // Squirrel away an easily accessible copy.
4707 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4708 cb_state->debug_label = LoggingLabel(pLabelInfo);
4709}
4710
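// Shared recorder for vkEnumeratePhysicalDeviceGroups and its KHR alias: register every physical
// device reported in the returned groups and cache its supported features via the dispatch table.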
4711void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004712 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004713 if (NULL != pPhysicalDeviceGroupProperties) {
4714 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
4715 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
4716 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
4717 auto &phys_device_state = physical_device_map[cur_phys_dev];
4718 phys_device_state.phys_device = cur_phys_dev;
4719 // Init actual features for each physical device
4720 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
4721 }
4722 }
4723 }
4724}
4725
4726void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004727 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004728 VkResult result) {
4729 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4730 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4731}
4732
4733void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004734 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004735 VkResult result) {
4736 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4737 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4738}
4739
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004740void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
4741 uint32_t queueFamilyIndex,
4742 uint32_t *pCounterCount,
4743 VkPerformanceCounterKHR *pCounters) {
4744 if (NULL == pCounters) return;
4745
4746 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4747 assert(physical_device_state);
4748
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004749 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queue_family_counters(new QUEUE_FAMILY_PERF_COUNTERS());
4750 queue_family_counters->counters.resize(*pCounterCount);
4751 for (uint32_t i = 0; i < *pCounterCount; i++) queue_family_counters->counters[i] = pCounters[i];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004752
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004753 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queue_family_counters);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004754}
4755
4756void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
4757 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
4758 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
4759 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4760 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
4761}
4762
4763void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
4764 VkResult result) {
4765 if (result == VK_SUCCESS) performance_lock_acquired = true;
4766}
4767
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004768void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
4769 performance_lock_acquired = false;
4770 for (auto &cmd_buffer : commandBufferMap) {
4771 cmd_buffer.second->performance_lock_released = true;
4772 }
4773}
4774
locke-lunargd556cc32019-09-17 01:21:23 -06004775void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004776 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004777 const VkAllocationCallbacks *pAllocator) {
4778 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004779 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4780 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004781 desc_template_map.erase(descriptorUpdateTemplate);
4782}
4783
4784void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004785 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004786 const VkAllocationCallbacks *pAllocator) {
4787 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004788 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4789 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004790 desc_template_map.erase(descriptorUpdateTemplate);
4791}
4792
Mike Schuchardt2df08912020-12-15 16:28:09 -08004793void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4794 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) {
locke-lunargd556cc32019-09-17 01:21:23 -06004795 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004796 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004797 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
4798}
4799
Mike Schuchardt2df08912020-12-15 16:28:09 -08004800void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device,
4801 const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4802 const VkAllocationCallbacks *pAllocator,
4803 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate,
4804 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004805 if (VK_SUCCESS != result) return;
4806 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4807}
4808
4809void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004810 VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
4811 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004812 if (VK_SUCCESS != result) return;
4813 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4814}
4815
4816void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004817 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004818 const void *pData) {
4819 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
4820 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
4821 assert(0);
4822 } else {
4823 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
4824 // TODO: Record template push descriptor updates
4825 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
4826 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
4827 }
4828 }
4829}
4830
4831void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
4832 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4833 const void *pData) {
4834 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
4835}
4836
4837void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004838 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004839 const void *pData) {
4840 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
4841}
4842
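// Push descriptor updates arrive as an opaque template blob; decode the blob against the given
// pipeline layout's set layout into ordinary write updates before recording the push descriptor state.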
Mike Schuchardt2df08912020-12-15 16:28:09 -08004843void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
4844 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4845 VkPipelineLayout layout, uint32_t set,
4846 const void *pData) {
locke-lunargd556cc32019-09-17 01:21:23 -06004847 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4848
4849 const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4850 if (template_state) {
4851 auto layout_data = GetPipelineLayout(layout);
Jeremy Gebbenadb3eb02021-06-15 12:55:19 -06004852 auto dsl = layout_data ? layout_data->GetDsl(set) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004853 const auto &template_ci = template_state->create_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004854 if (dsl && !dsl->Destroyed()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004855 // Decode the template into a set of write updates
4856 cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
4857 dsl->GetDescriptorSetLayout());
4858 RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
4859 static_cast<uint32_t>(decoded_template.desc_writes.size()),
4860 decoded_template.desc_writes.data());
4861 }
4862 }
4863}
4864
4865void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
4866 uint32_t *pPropertyCount, void *pProperties) {
4867 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4868 if (*pPropertyCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06004869 physical_device_state->display_plane_property_count = *pPropertyCount;
4870 }
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004871 if (*pPropertyCount || pProperties) {
4872 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004873 }
4874}
4875
4876void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
4877 uint32_t *pPropertyCount,
4878 VkDisplayPlanePropertiesKHR *pProperties,
4879 VkResult result) {
4880 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4881 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
4882}
4883
4884void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
4885 uint32_t *pPropertyCount,
4886 VkDisplayPlaneProperties2KHR *pProperties,
4887 VkResult result) {
4888 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4889 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
4890}
4891
4892void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4893 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
4894 QueryObject query_obj = {queryPool, query, index};
4895 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4896 RecordCmdBeginQuery(cb_state, query_obj);
4897}
4898
4899void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4900 uint32_t query, uint32_t index) {
4901 QueryObject query_obj = {queryPool, query, index};
4902 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4903 RecordCmdEndQuery(cb_state, query_obj);
4904}
4905
4906void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
4907 VkSamplerYcbcrConversion ycbcr_conversion) {
Jeremy Gebbenf4fb2a02021-07-08 09:57:46 -06004908 VkFormatFeatureFlags format_features = 0;
4909
4910 if (create_info->format != VK_FORMAT_UNDEFINED) {
4911 format_features = GetPotentialFormatFeatures(create_info->format);
4912 } else if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
4913 // If format is VK_FORMAT_UNDEFINED, format_features will be set by external AHB features
4914 format_features = GetExternalFormatFeaturesANDROID(create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004915 }
Jeremy Gebbenf4fb2a02021-07-08 09:57:46 -06004916
4917 samplerYcbcrConversionMap[ycbcr_conversion] =
4918 std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>(ycbcr_conversion, create_info, format_features);
locke-lunargd556cc32019-09-17 01:21:23 -06004919}
4920
4921void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
4922 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
4923 const VkAllocationCallbacks *pAllocator,
4924 VkSamplerYcbcrConversion *pYcbcrConversion,
4925 VkResult result) {
4926 if (VK_SUCCESS != result) return;
4927 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
4928}
4929
4930void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
4931 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
4932 const VkAllocationCallbacks *pAllocator,
4933 VkSamplerYcbcrConversion *pYcbcrConversion,
4934 VkResult result) {
4935 if (VK_SUCCESS != result) return;
4936 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
4937}
4938
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004939void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004940 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004941 ycbcr_state->Destroy();
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004942 samplerYcbcrConversionMap.erase(ycbcr_conversion);
4943}
4944
locke-lunargd556cc32019-09-17 01:21:23 -06004945void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
4946 const VkAllocationCallbacks *pAllocator) {
4947 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004948 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06004949}
4950
4951void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
4952 VkSamplerYcbcrConversion ycbcrConversion,
4953 const VkAllocationCallbacks *pAllocator) {
4954 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07004955 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06004956}
4957
Tony-LunarG977448c2019-12-02 14:52:02 -07004958void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
4959 uint32_t queryCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06004960 // Do nothing if the feature is not enabled.
Piers Daniell41b8c5d2020-01-10 15:42:00 -07004961 if (!enabled_features.core12.hostQueryReset) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004962
4963 // Do nothing if the query pool has been destroyed.
4964 auto query_pool_state = GetQueryPoolState(queryPool);
4965 if (!query_pool_state) return;
4966
4967 // Reset the state of existing entries.
4968 QueryObject query_obj{queryPool, 0};
4969 const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
4970 for (uint32_t i = 0; i < max_query_count; ++i) {
4971 query_obj.query = firstQuery + i;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004972 queryToStateMap[query_obj] = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004973 if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004974 for (uint32_t pass_index = 0; pass_index < query_pool_state->n_performance_passes; pass_index++) {
4975 query_obj.perf_pass = pass_index;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004976 queryToStateMap[query_obj] = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004977 }
4978 }
locke-lunargd556cc32019-09-17 01:21:23 -06004979 }
4980}
4981
Tony-LunarG977448c2019-12-02 14:52:02 -07004982void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
4983 uint32_t queryCount) {
4984 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
4985}
4986
4987void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
4988 uint32_t queryCount) {
4989 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
4990}
4991
locke-lunargd556cc32019-09-17 01:21:23 -06004992void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
4993 const TEMPLATE_STATE *template_state, const void *pData) {
4994 // Translate the templated update into a normal update for validation...
4995 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
4996 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
4997 decoded_update.desc_writes.data(), 0, NULL);
4998}
4999
5000// Update the common AllocateDescriptorSetsData
5001void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005002 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005003 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005004 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005005 if (layout) {
5006 ds_data->layout_nodes[i] = layout;
5007 // Count total descriptors required per type
5008 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5009 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005010 uint32_t type_index = static_cast<uint32_t>(binding_layout->descriptorType);
5011 ds_data->required_descriptors_by_type[type_index] += binding_layout->descriptorCount;
locke-lunargd556cc32019-09-17 01:21:23 -06005012 }
5013 }
5014        // Any unknown layouts will be flagged as errors during the ValidateAllocateDescriptorSets() call
5015 }
5016}
5017
5018// Decrement allocated sets from the pool and insert new sets into set_map
5019void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
5020 const VkDescriptorSet *descriptor_sets,
5021 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
5022 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
5023 // Account for sets and individual descriptors allocated from pool
5024 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
5025 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
5026 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
5027 }
5028
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07005029 const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(p_alloc_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06005030 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
5031
5032 // Create tracking object for each descriptor set; insert into global map and the pool's set.
5033 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
5034 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
5035
Jeff Bolz41a1ced2019-10-11 11:40:49 -05005036 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07005037 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06005038 pool_state->sets.insert(new_ds.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005039 setMap[descriptor_sets[i]] = std::move(new_ds);
5040 }
5041}
5042
5043// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
Jeremy Kniager05631e72020-06-08 14:21:35 -06005044void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
locke-lunarg540b2252020-08-03 13:23:36 -06005045 VkPipelineBindPoint bind_point, const char *function) {
5046 UpdateDrawState(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005047 cb_state->hasDispatchCmd = true;
5048}
5049
locke-lunargd556cc32019-09-17 01:21:23 -06005050// Generic function to handle state update for all CmdDraw* type functions
locke-lunarg540b2252020-08-03 13:23:36 -06005051void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
5052 const char *function) {
5053 UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005054 cb_state->hasDrawCmd = true;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005055
5056 // Update the consumed viewport/scissor count.
5057    uint32_t &used = cb_state->usedViewportScissorCount;
5058 used = std::max(used, cb_state->pipelineStaticViewportCount);
5059 used = std::max(used, cb_state->pipelineStaticScissorCount);
5060 cb_state->usedDynamicViewportCount |= !!(cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET); // !! silences MSVC warn
5061 cb_state->usedDynamicScissorCount |= !!(cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET);
locke-lunargd556cc32019-09-17 01:21:23 -06005062}
5063
5064void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5065 uint32_t firstVertex, uint32_t firstInstance) {
5066 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005067 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
locke-lunargd556cc32019-09-17 01:21:23 -06005068}
5069
Tony-LunarG745150c2021-07-02 15:07:31 -06005070void ValidationStateTracker::PostCallRecordCmdDrawMultiEXT(VkCommandBuffer commandBuffer, uint32_t drawCount,
5071 const VkMultiDrawInfoEXT *pVertexInfo, uint32_t instanceCount,
5072 uint32_t firstInstance, uint32_t stride) {
5073 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5074 UpdateStateCmdDrawType(cb_state, CMD_DRAWMULTIEXT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMultiEXT()");
5075}
5076
locke-lunargd556cc32019-09-17 01:21:23 -06005077void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5078 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5079 uint32_t firstInstance) {
5080 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005081 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
locke-lunargd556cc32019-09-17 01:21:23 -06005082}
5083
Tony-LunarG745150c2021-07-02 15:07:31 -06005084void ValidationStateTracker::PostCallRecordCmdDrawMultiIndexedEXT(VkCommandBuffer commandBuffer, uint32_t drawCount,
5085 const VkMultiDrawIndexedInfoEXT *pIndexInfo,
5086 uint32_t instanceCount, uint32_t firstInstance, uint32_t stride,
5087 const int32_t *pVertexOffset) {
5088 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5089 UpdateStateCmdDrawType(cb_state, CMD_DRAWMULTIINDEXEDEXT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMultiIndexedEXT()");
5090}
5091
locke-lunargd556cc32019-09-17 01:21:23 -06005092void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5093 uint32_t count, uint32_t stride) {
5094 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5095 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005096 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005097 if (!disabled[command_buffer_state]) {
5098 cb_state->AddChild(buffer_state);
5099 }
locke-lunargd556cc32019-09-17 01:21:23 -06005100}
5101
5102void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5103 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5104 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5105 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005106 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005107 if (!disabled[command_buffer_state]) {
5108 cb_state->AddChild(buffer_state);
5109 }
locke-lunargd556cc32019-09-17 01:21:23 -06005110}
5111
5112void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5113 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005114 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
locke-lunargd556cc32019-09-17 01:21:23 -06005115}
5116
5117void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5118 VkDeviceSize offset) {
5119 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005120 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005121 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005122 if (!disabled[command_buffer_state]) {
5123 cb_state->AddChild(buffer_state);
5124 }
locke-lunargd556cc32019-09-17 01:21:23 -06005125}
5126
Tony-LunarG977448c2019-12-02 14:52:02 -07005127void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5128 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
locke-lunarg540b2252020-08-03 13:23:36 -06005129 uint32_t stride, const char *function) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005130 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5131 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5132 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005133 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005134 if (!disabled[command_buffer_state]) {
5135 cb_state->AddChild(buffer_state);
5136 cb_state->AddChild(count_buffer_state);
5137 }
Tony-LunarG977448c2019-12-02 14:52:02 -07005138}
5139
locke-lunargd556cc32019-09-17 01:21:23 -06005140void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5141 VkDeviceSize offset, VkBuffer countBuffer,
5142 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5143 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005144 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5145 "vkCmdDrawIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005146}
5147
5148void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5149 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5150 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005151 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5152 "vkCmdDrawIndirectCount()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005153}
5154
5155void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5156 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
locke-lunarg540b2252020-08-03 13:23:36 -06005157 uint32_t maxDrawCount, uint32_t stride, const char *function) {
locke-lunargd556cc32019-09-17 01:21:23 -06005158 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5159 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5160 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005161 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005162 if (!disabled[command_buffer_state]) {
5163 cb_state->AddChild(buffer_state);
5164 cb_state->AddChild(count_buffer_state);
5165 }
locke-lunargd556cc32019-09-17 01:21:23 -06005166}
5167
5168void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5169 VkDeviceSize offset, VkBuffer countBuffer,
5170 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5171 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005172 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5173 "vkCmdDrawIndexedIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005174}
5175
5176void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
5177 VkDeviceSize offset, VkBuffer countBuffer,
5178 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5179 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005180 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5181 "vkCmdDrawIndexedIndirectCount()");
locke-lunargd556cc32019-09-17 01:21:23 -06005182}
5183
5184void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5185 uint32_t firstTask) {
5186 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005187 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005188}
5189
5190void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5191 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5192 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005193 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5194 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005195 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005196 if (!disabled[command_buffer_state] && buffer_state) {
5197 cb_state->AddChild(buffer_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005198 }
5199}
5200
5201void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5202 VkDeviceSize offset, VkBuffer countBuffer,
5203 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5204 uint32_t stride) {
5205 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5206 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5207 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005208 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5209 "vkCmdDrawMeshTasksIndirectCountNV()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005210 if (!disabled[command_buffer_state]) {
5211 if (buffer_state) {
5212 cb_state->AddChild(buffer_state);
5213 }
5214 if (count_buffer_state) {
5215 cb_state->AddChild(count_buffer_state);
5216 }
locke-lunargd556cc32019-09-17 01:21:23 -06005217 }
5218}
5219
Jeremy Gebben252f60c2021-07-15 14:54:30 -06005220void ValidationStateTracker::PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
5221 VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
5222 VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
5223 VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
5224 VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
5225 VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
5226 uint32_t width, uint32_t height, uint32_t depth) {
5227 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5228 UpdateStateCmdDrawDispatchType(cb_state, CMD_TRACERAYSNV, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, "vkCmdTraceRaysNV()");
5229 cb_state->hasTraceRaysCmd = true;
5230}
5231
5233void ValidationStateTracker::PostCallRecordCmdTraceRaysKHR(VkCommandBuffer commandBuffer,
5234 const VkStridedDeviceAddressRegionKHR *pRaygenShaderBindingTable,
5235 const VkStridedDeviceAddressRegionKHR *pMissShaderBindingTable,
5236 const VkStridedDeviceAddressRegionKHR *pHitShaderBindingTable,
5237 const VkStridedDeviceAddressRegionKHR *pCallableShaderBindingTable, uint32_t width,
5238 uint32_t height, uint32_t depth) {
5239 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5240 UpdateStateCmdDrawDispatchType(cb_state, CMD_TRACERAYSKHR, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, "vkCmdTraceRaysKHR()");
5241 cb_state->hasTraceRaysCmd = true;
5242}
5243
5244void ValidationStateTracker::PostCallRecordCmdTraceRaysIndirectKHR(VkCommandBuffer commandBuffer,
5245 const VkStridedDeviceAddressRegionKHR *pRaygenShaderBindingTable,
5246 const VkStridedDeviceAddressRegionKHR *pMissShaderBindingTable,
5247 const VkStridedDeviceAddressRegionKHR *pHitShaderBindingTable,
5248 const VkStridedDeviceAddressRegionKHR *pCallableShaderBindingTable,
5249 VkDeviceAddress indirectDeviceAddress) {
5250 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5251 UpdateStateCmdDrawDispatchType(cb_state, CMD_TRACERAYSINDIRECTKHR, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
5252 "vkCmdTraceRaysIndirectKHR()");
5253 cb_state->hasTraceRaysCmd = true;
5254}
5255
locke-lunargd556cc32019-09-17 01:21:23 -06005256void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
5257 const VkAllocationCallbacks *pAllocator,
5258 VkShaderModule *pShaderModule, VkResult result,
5259 void *csm_state_data) {
5260 if (VK_SUCCESS != result) return;
5261 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
5262
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06005263 spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
locke-lunargd556cc32019-09-17 01:21:23 -06005264 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005265 auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
5266 csm_state->unique_shader_id)
5267 : std::make_shared<SHADER_MODULE_STATE>();
sfricke-samsung962cad92021-04-13 00:46:29 -07005268 new_shader_module->SetPushConstantUsedInShader();
locke-lunargd556cc32019-09-17 01:21:23 -06005269 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
5270}
5271
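// Capture per-stage shader interface data (entry point, accessible ids, descriptor uses) and fold
// each stage's descriptor requirements into the pipeline's active_slots map.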
5272void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06005273 PipelineStageState *stage_state) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005274 // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
locke-lunargde3f0fa2020-09-10 11:55:31 -06005275 stage_state->entry_point_name = pStage->pName;
5276 stage_state->shader_state = GetShared<SHADER_MODULE_STATE>(pStage->module);
5277 auto module = stage_state->shader_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06005278 if (!module->has_valid_spirv) return;
5279
5280 // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
sfricke-samsung962cad92021-04-13 00:46:29 -07005281 auto entrypoint = module->FindEntrypoint(pStage->pName, pStage->stage);
locke-lunargd556cc32019-09-17 01:21:23 -06005282 if (entrypoint == module->end()) return;
5283
locke-lunarg654e3692020-06-04 17:19:15 -06005284 stage_state->stage_flag = pStage->stage;
5285
locke-lunargd556cc32019-09-17 01:21:23 -06005286 // Mark accessible ids
sfricke-samsung962cad92021-04-13 00:46:29 -07005287 stage_state->accessible_ids = module->MarkAccessibleIds(entrypoint);
5288 module->ProcessExecutionModes(entrypoint, pipeline);
locke-lunargd556cc32019-09-17 01:21:23 -06005289
sfricke-samsung962cad92021-04-13 00:46:29 -07005290 stage_state->descriptor_uses = module->CollectInterfaceByDescriptorSlot(
5291 stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
locke-lunargd556cc32019-09-17 01:21:23 -06005292 // Capture descriptor uses for the pipeline
locke-lunarg36045992020-08-20 16:54:37 -06005293 for (const auto &use : stage_state->descriptor_uses) {
locke-lunargd556cc32019-09-17 01:21:23 -06005294        // While validating shaders, capture which slots are used by the pipeline
John Zulauf649edd52019-10-02 14:39:41 -06005295 const uint32_t slot = use.first.first;
locke-lunarg351c9d82020-10-23 14:43:21 -06005296 pipeline->active_slots[slot][use.first.second].is_writable |= use.second.is_writable;
locke-lunarg36045992020-08-20 16:54:37 -06005297 auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
sfricke-samsung962cad92021-04-13 00:46:29 -07005298 reqs = descriptor_req(reqs | module->DescriptorTypeToReqs(use.second.type_id));
locke-lunarg25b6c352020-08-06 17:44:18 -06005299 if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
locke-lunarg12d20992020-09-21 12:46:49 -06005300 if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
locke-lunargae2a43c2020-09-22 17:21:57 -06005301 if (use.second.is_sampler_bias_offset) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET);
locke-lunarg12d20992020-09-21 12:46:49 -06005302
John Zulauf649edd52019-10-02 14:39:41 -06005303 pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
locke-lunarg36045992020-08-20 16:54:37 -06005304 if (use.second.samplers_used_by_image.size()) {
locke-lunarg654a9052020-10-13 16:28:42 -06005305 auto &samplers_used_by_image = pipeline->active_slots[slot][use.first.second].samplers_used_by_image;
5306 if (use.second.samplers_used_by_image.size() > samplers_used_by_image.size()) {
5307 samplers_used_by_image.resize(use.second.samplers_used_by_image.size());
5308 }
locke-lunarg654a9052020-10-13 16:28:42 -06005309 uint32_t image_index = 0;
5310 for (const auto &samplers : use.second.samplers_used_by_image) {
5311 for (const auto &sampler : samplers) {
locke-lunargb8be8222020-10-20 00:34:37 -06005312 samplers_used_by_image[image_index].emplace(sampler, nullptr);
locke-lunarg654a9052020-10-13 16:28:42 -06005313 }
5314 ++image_index;
5315 }
locke-lunarg36045992020-08-20 16:54:37 -06005316 }
locke-lunargd556cc32019-09-17 01:21:23 -06005317 }
locke-lunarg78486832020-09-09 19:39:42 -06005318
locke-lunarg96dc9632020-06-10 17:22:18 -06005319 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
sfricke-samsung962cad92021-04-13 00:46:29 -07005320 pipeline->fragmentShader_writable_output_location_list = module->CollectWritableOutputLocationinFS(*pStage);
locke-lunarg96dc9632020-06-10 17:22:18 -06005321 }
locke-lunargd556cc32019-09-17 01:21:23 -06005322}
5323
sfricke-samsung70ad9ce2021-04-04 00:53:54 -07005324// Discussed in detail in https://github.com/KhronosGroup/Vulkan-Docs/issues/1081
5325// Internal discussion and CTS tests were written to prove that this is not called after an incompatible vkCmdBindPipeline:
5326// "Binding a pipeline with a layout that is not compatible with the push constant layout does not disturb the push constant values"
5327//
5328// vkCmdBindDescriptorSets has nothing to do with push constants, so this does not need to be called after it either
5329//
5330// Part of this assumes that at draw/dispatch/traceRays/etc. time the app will have a properly compatible layout, or else other VUs will be triggered
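//
// Illustrative app-side sequence (a sketch only; cb, layout_a, layout_b, and the pipeline name are
// hypothetical, not validation state): data pushed through layout_a stays intact even when a
// pipeline created with an incompatible layout_b is bound afterwards, so no reset is needed there.
//   vkCmdPushConstants(cb, layout_a, VK_SHADER_STAGE_VERTEX_BIT, 0, 16, data);
//   vkCmdBindPipeline(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_with_layout_b);  // values not disturbed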
locke-lunargd556cc32019-09-17 01:21:23 -06005331void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
5332 if (cb_state == nullptr) {
5333 return;
5334 }
5335
5336 const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
5337 if (pipeline_layout_state == nullptr) {
5338 return;
5339 }
5340
5341 if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
5342 cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
5343 cb_state->push_constant_data.clear();
locke-lunargde3f0fa2020-09-10 11:55:31 -06005344 cb_state->push_constant_data_update.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06005345 uint32_t size_needed = 0;
John Zulauf79f06582021-02-27 18:38:39 -07005346 for (const auto &push_constant_range : *cb_state->push_constant_data_ranges) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06005347 auto size = push_constant_range.offset + push_constant_range.size;
5348 size_needed = std::max(size_needed, size);
5349
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005350 auto stage_flags = push_constant_range.stageFlags;
locke-lunargde3f0fa2020-09-10 11:55:31 -06005351 uint32_t bit_shift = 0;
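            // Walk stage_flags one bit at a time; for each stage that is set, size that stage's
            // per-byte tracking vector so bytes below the range read PC_Byte_Not_Set and bytes
            // inside the range read PC_Byte_Not_Updated.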
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005352 while (stage_flags) {
5353 if (stage_flags & 1) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06005354 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
5355 const auto it = cb_state->push_constant_data_update.find(flag);
5356
5357 if (it != cb_state->push_constant_data_update.end()) {
5358 if (it->second.size() < push_constant_range.offset) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005359 it->second.resize(push_constant_range.offset, PC_Byte_Not_Set);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005360 }
5361 if (it->second.size() < size) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005362 it->second.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005363 }
5364 } else {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005365 std::vector<uint8_t> bytes;
5366 bytes.resize(push_constant_range.offset, PC_Byte_Not_Set);
5367 bytes.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005368 cb_state->push_constant_data_update[flag] = bytes;
5369 }
5370 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005371 stage_flags = stage_flags >> 1;
locke-lunargde3f0fa2020-09-10 11:55:31 -06005372 ++bit_shift;
5373 }
locke-lunargd556cc32019-09-17 01:21:23 -06005374 }
5375 cb_state->push_constant_data.resize(size_needed, 0);
5376 }
5377}
John Zulauf22b0fbe2019-10-15 06:26:16 -06005378
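// Create an IMAGE_STATE wrapper for each swapchain image handed back to the application. Images
// bound to the same swapchain slot share a single fake memory address so aliasing between them
// can be tracked.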
5379void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
5380 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
5381 VkResult result) {
5382 if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
5383 auto swapchain_state = GetSwapchainState(swapchain);
5384
5385 if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);
5386
5387 if (pSwapchainImages) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005388 for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
John Zulauf29d00532021-03-04 13:28:54 -07005389 SWAPCHAIN_IMAGE &swapchain_image = swapchain_state->images[i];
John Zulauffaa7a522021-03-05 12:22:45 -07005390 if (swapchain_image.image_state) continue; // Already retrieved this.
John Zulauf22b0fbe2019-10-15 06:26:16 -06005391
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06005392 auto format_features =
5393 GetImageFormatFeatures(physical_device, device, pSwapchainImages[i], swapchain_state->image_create_info.format,
5394 swapchain_state->image_create_info.tiling);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005395
Jeremy Gebbenbcba6d32021-07-16 11:41:41 -06005396 auto image_state = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], swapchain_state->image_create_info.ptr(),
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06005397 swapchain, i, format_features);
John Zulauf29d00532021-03-04 13:28:54 -07005398
5399 if (swapchain_image.bound_images.empty()) {
5400 // First time "bind" allocates
5401 image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
5402 } else {
5403 // All others reuse
Jeremy Gebben8ee02af2021-07-16 10:15:55 -06005404 image_state->swapchain_fake_address = (*swapchain_image.bound_images.cbegin()).second->swapchain_fake_address;
John Zulauf29d00532021-03-04 13:28:54 -07005405 // Since there are others, need to update the aliasing information
Jeremy Gebben8ee02af2021-07-16 10:15:55 -06005406                for (auto &entry : swapchain_image.bound_images) {
5407 image_state->AddAliasingImage(entry.second.get());
Jeremy Gebben6fbf8242021-06-21 09:14:46 -06005408 }
John Zulauf29d00532021-03-04 13:28:54 -07005409 }
5410
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06005411 swapchain_image.image_state = image_state.get();
Jeremy Gebben8ee02af2021-07-16 10:15:55 -06005412 swapchain_image.bound_images.emplace(pSwapchainImages[i], image_state);
Petr Kraus44f1c482020-04-25 20:09:25 +02005413
Jeremy Gebbenb4d17012021-07-08 13:18:15 -06005414 image_state->AddParent(swapchain_state);
5415 imageMap[pSwapchainImages[i]] = std::move(image_state);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005416 }
5417 }
5418
5419 if (*pSwapchainImageCount) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005420 swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
5421 }
5422}
sourav parmar35e7a002020-06-09 17:58:44 -07005423
sourav parmar35e7a002020-06-09 17:58:44 -07005424void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
5425 const VkCopyAccelerationStructureInfoKHR *pInfo) {
5426 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5427 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07005428 ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfo->src);
5429 ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfo->dst);
sourav parmar35e7a002020-06-09 17:58:44 -07005430 if (dst_as_state != nullptr && src_as_state != nullptr) {
5431 dst_as_state->built = true;
5432 dst_as_state->build_info_khr = src_as_state->build_info_khr;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005433 if (!disabled[command_buffer_state]) {
5434 cb_state->AddChild(dst_as_state);
5435 cb_state->AddChild(src_as_state);
5436 }
sourav parmar35e7a002020-06-09 17:58:44 -07005437 }
5438 }
5439}
Piers Daniell39842ee2020-07-10 16:42:33 -06005440
5441void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
5442 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5443 cb_state->status |= CBSTATUS_CULL_MODE_SET;
5444 cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
5445}
5446
5447void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
5448 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5449 cb_state->status |= CBSTATUS_FRONT_FACE_SET;
5450 cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
5451}
5452
5453void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
5454 VkPrimitiveTopology primitiveTopology) {
5455 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5456 cb_state->primitiveTopology = primitiveTopology;
5457 cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5458 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5459}
5460
5461void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
5462 const VkViewport *pViewports) {
5463 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07005464 uint32_t bits = (1u << viewportCount) - 1u;
5465 cb_state->viewportWithCountMask |= bits;
5466 cb_state->trashedViewportMask &= ~bits;
Tobias Hector6663c9b2020-11-05 10:18:02 +00005467 cb_state->viewportWithCountCount = viewportCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005468 cb_state->trashedViewportCount = false;
Piers Daniell39842ee2020-07-10 16:42:33 -06005469 cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
5470 cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005471
5472 cb_state->dynamicViewports.resize(std::max(size_t(viewportCount), cb_state->dynamicViewports.size()));
5473 for (size_t i = 0; i < viewportCount; ++i) {
5474 cb_state->dynamicViewports[i] = pViewports[i];
5475 }
Piers Daniell39842ee2020-07-10 16:42:33 -06005476}
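
// Worked example of the bit tracking above (illustrative values): with viewportCount == 3,
// bits == 0b111, so the low three bits are added to viewportWithCountMask and removed from
// trashedViewportMask, and dynamicViewports grows to hold at least three entries. Sketch,
// assuming VK_EXT_extended_dynamic_state is enabled:
//
//   VkViewport vps[3] = {{0.0f, 0.0f, 640.0f, 480.0f, 0.0f, 1.0f},
//                        {0.0f, 0.0f, 320.0f, 240.0f, 0.0f, 1.0f},
//                        {0.0f, 0.0f, 160.0f, 120.0f, 0.0f, 1.0f}};
//   vkCmdSetViewportWithCountEXT(commandBuffer, 3, vps);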
5477
5478void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
5479 const VkRect2D *pScissors) {
5480 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07005481 uint32_t bits = (1u << scissorCount) - 1u;
5482 cb_state->scissorWithCountMask |= bits;
5483 cb_state->trashedScissorMask &= ~bits;
5484 cb_state->scissorWithCountCount = scissorCount;
5485 cb_state->trashedScissorCount = false;
Piers Daniell39842ee2020-07-10 16:42:33 -06005486 cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
5487 cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
5488}
5489
5490void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
5491 uint32_t bindingCount, const VkBuffer *pBuffers,
5492 const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
5493 const VkDeviceSize *pStrides) {
5494 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5495 if (pStrides) {
5496 cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
5497 cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
5498 }
5499
5500 uint32_t end = firstBinding + bindingCount;
5501 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
5502 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
5503 }
5504
5505 for (uint32_t i = 0; i < bindingCount; ++i) {
5506 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07005507 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
Piers Daniell39842ee2020-07-10 16:42:33 -06005508 vertex_buffer_binding.offset = pOffsets[i];
5509 vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
5510 vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
5511        // Add binding for this vertex buffer to this command buffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005512 if (!disabled[command_buffer_state] && pBuffers[i]) {
5513 cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
Piers Daniell39842ee2020-07-10 16:42:33 -06005514 }
5515 }
5516}
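
// Defaults applied above when the optional arrays are omitted: size falls back to VK_WHOLE_SIZE
// and stride is stored as 0 (the placeholder used when strides come from the pipeline). Passing a
// non-null pStrides additionally sets CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET. Illustrative
// sketch (names are examples only), assuming VK_EXT_extended_dynamic_state is enabled:
//
//   VkBuffer buffers[1] = {vertex_buffer};
//   VkDeviceSize offsets[1] = {0};
//   vkCmdBindVertexBuffers2EXT(commandBuffer, 0, 1, buffers, offsets, nullptr, nullptr);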
5517
5518void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
5519 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5520 cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
5521 cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
5522}
5523
5524void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
5525 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5526 cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5527 cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5528}
5529
5530void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
5531 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5532 cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
5533 cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
5534}
5535
5536void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
5537 VkBool32 depthBoundsTestEnable) {
5538 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5539 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5540 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5541}

5542void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
5543 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5544 cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
5545 cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
5546}
5547
5548void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
5549 VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
5550 VkCompareOp compareOp) {
5551 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5552 cb_state->status |= CBSTATUS_STENCIL_OP_SET;
5553 cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
5554}
locke-lunarg4189aa22020-10-21 00:23:48 -06005555
5556void ValidationStateTracker::PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
5557 uint32_t discardRectangleCount,
5558 const VkRect2D *pDiscardRectangles) {
5559 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5560 cb_state->status |= CBSTATUS_DISCARD_RECTANGLE_SET;
5561 cb_state->static_status &= ~CBSTATUS_DISCARD_RECTANGLE_SET;
5562}
5563
5564void ValidationStateTracker::PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
5565 const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
5566 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5567 cb_state->status |= CBSTATUS_SAMPLE_LOCATIONS_SET;
5568 cb_state->static_status &= ~CBSTATUS_SAMPLE_LOCATIONS_SET;
5569}
5570
5571void ValidationStateTracker::PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer,
5572 VkCoarseSampleOrderTypeNV sampleOrderType,
5573 uint32_t customSampleOrderCount,
5574 const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) {
5575 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5576 cb_state->status |= CBSTATUS_COARSE_SAMPLE_ORDER_SET;
5577 cb_state->static_status &= ~CBSTATUS_COARSE_SAMPLE_ORDER_SET;
5578}
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -07005579
5580void ValidationStateTracker::PreCallRecordCmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer, uint32_t patchControlPoints) {
5581 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5582 cb_state->status |= CBSTATUS_PATCH_CONTROL_POINTS_SET;
5583 cb_state->static_status &= ~CBSTATUS_PATCH_CONTROL_POINTS_SET;
5584}
5585
5586void ValidationStateTracker::PreCallRecordCmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp) {
5587 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5588 cb_state->status |= CBSTATUS_LOGIC_OP_SET;
5589 cb_state->static_status &= ~CBSTATUS_LOGIC_OP_SET;
5590}
5591
5592void ValidationStateTracker::PreCallRecordCmdSetRasterizerDiscardEnableEXT(VkCommandBuffer commandBuffer,
5593 VkBool32 rasterizerDiscardEnable) {
5594 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5595 cb_state->status |= CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
5596 cb_state->static_status &= ~CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
5597}
5598
5599void ValidationStateTracker::PreCallRecordCmdSetDepthBiasEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) {
5600 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5601 cb_state->status |= CBSTATUS_DEPTH_BIAS_ENABLE_SET;
5602 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_ENABLE_SET;
5603}
5604
5605void ValidationStateTracker::PreCallRecordCmdSetPrimitiveRestartEnableEXT(VkCommandBuffer commandBuffer,
5606 VkBool32 primitiveRestartEnable) {
5607 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5608 cb_state->status |= CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
5609 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005610}
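
// The five hooks above (patch control points, logic op, rasterizer discard, depth bias enable,
// primitive restart) cover the states added by VK_EXT_extended_dynamic_state2; they reuse the
// status/static_status pattern described earlier and store no additional payload.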
Piers Daniell924cd832021-05-18 13:48:47 -06005611
5612void ValidationStateTracker::PreCallRecordCmdSetVertexInputEXT(
5613 VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount,
5614 const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount,
5615 const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions) {
5616 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5617 cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET;
5618 cb_state->static_status &= ~(CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET);
5619}
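
// Setting vertex input dynamically also implies the binding-stride state, which is why both
// CBSTATUS bits are toggled together above. Illustrative sketch (values are examples only),
// assuming VK_EXT_vertex_input_dynamic_state is enabled:
//
//   VkVertexInputBindingDescription2EXT binding = {VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT};
//   binding.binding = 0;  binding.stride = 3 * sizeof(float);  binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;  binding.divisor = 1;
//   VkVertexInputAttributeDescription2EXT attr = {VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT};
//   attr.location = 0;  attr.binding = 0;  attr.format = VK_FORMAT_R32G32B32_SFLOAT;  attr.offset = 0;
//   vkCmdSetVertexInputEXT(commandBuffer, 1, &binding, 1, &attr);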
Nathaniel Cesario42ac6ca2021-06-15 17:23:05 -06005620
5621void ValidationStateTracker::RecordGetBufferDeviceAddress(const VkBufferDeviceAddressInfo *pInfo, VkDeviceAddress address) {
5622 BUFFER_STATE *buffer_state = GetBufferState(pInfo->buffer);
5623 if (buffer_state) {
5624 // address is used for GPU-AV and ray tracing buffer validation
5625 buffer_state->deviceAddress = address;
5626 buffer_address_map_.emplace(address, buffer_state);
5627 }
5628}
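
// The mapping recorded above lets GPU-AV and ray tracing validation resolve a raw VkDeviceAddress
// back to its owning BUFFER_STATE. Illustrative sketch (names are examples only), assuming the
// bufferDeviceAddress feature is enabled and the buffer was created with
// VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT:
//
//   VkBufferDeviceAddressInfo info = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO};
//   info.buffer = buffer;
//   VkDeviceAddress addr = vkGetBufferDeviceAddress(device, &info);  // the PostCallRecord hooks below store addr -> buffer_state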
5629
5630void ValidationStateTracker::PostCallRecordGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
5631 VkDeviceAddress address) {
5632 RecordGetBufferDeviceAddress(pInfo, address);
5633}
5634
5635void ValidationStateTracker::PostCallRecordGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
5636 VkDeviceAddress address) {
5637 RecordGetBufferDeviceAddress(pInfo, address);
5638}
5639
5640void ValidationStateTracker::PostCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
5641 VkDeviceAddress address) {
5642 RecordGetBufferDeviceAddress(pInfo, address);
Nathaniel Cesario39152e62021-07-02 13:04:16 -06005643}
5644
5645std::shared_ptr<SWAPCHAIN_NODE> ValidationStateTracker::CreateSwapchainState(const VkSwapchainCreateInfoKHR *create_info,
5646 VkSwapchainKHR swapchain) {
5647 return std::make_shared<SWAPCHAIN_NODE>(create_info, swapchain);
5648}
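
// CreateSwapchainState is a small factory; the intent (an assumption based on how the other
// Create*State helpers in this tracker are used, not confirmed by anything in this file) is that a
// derived validation object can override it to return a SWAPCHAIN_NODE subclass carrying extra
// per-swapchain data, while the state tracker's record paths keep working against the base type.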