/* Copyright (c) 2015-2021 The Khronos Group Inc.
 * Copyright (c) 2015-2021 Valve Corporation
 * Copyright (c) 2015-2021 LunarG, Inc.
 * Copyright (C) 2015-2021 Google Inc.
 * Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Shannon McPherson <shannon@lunarg.com>
 * Author: Tobias Hector <tobias.hector@amd.com>
 */

#include <algorithm>
#include <cmath>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"
#include "sync_utils.h"
#include "cmd_buffer_state.h"
#include "render_pass_state.h"

void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}

// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
// attachments won't persist past the API entry point exit.
static std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
                                                                          const FRAMEBUFFER_STATE &fb_state) {
    const VkImageView *attachments = fb_state.createInfo.pAttachments;
    uint32_t count = fb_state.createInfo.attachmentCount;
    if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
        const auto *framebuffer_attachments = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
        if (framebuffer_attachments) {
            attachments = framebuffer_attachments->pAttachments;
            count = framebuffer_attachments->attachmentCount;
        }
    }
    return std::make_pair(count, attachments);
}

template <typename ImageViewPointer, typename Get>
std::vector<ImageViewPointer> GetAttachmentViewsImpl(const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state,
                                                     const Get &get_fn) {
    std::vector<ImageViewPointer> views;

    const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
    const auto attachment_count = count_attachment.first;
    const auto *attachments = count_attachment.second;
    views.resize(attachment_count, nullptr);
    for (uint32_t i = 0; i < attachment_count; i++) {
        if (attachments[i] != VK_NULL_HANDLE) {
            views[i] = get_fn(attachments[i]);
        }
    }
    return views;
}

std::vector<std::shared_ptr<const IMAGE_VIEW_STATE>> ValidationStateTracker::GetSharedAttachmentViews(
    const VkRenderPassBeginInfo &rp_begin, const FRAMEBUFFER_STATE &fb_state) const {
    auto get_fn = [this](VkImageView handle) { return this->GetShared<IMAGE_VIEW_STATE>(handle); };
    return GetAttachmentViewsImpl<std::shared_ptr<const IMAGE_VIEW_STATE>>(rp_begin, fb_state, get_fn);
}

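// Illustrative sketch (not part of the tracker): with an imageless framebuffer the attachment views are only known at
// render pass begin time, supplied via VkRenderPassAttachmentBeginInfo, which is exactly what GetFramebufferAttachments()
// above pulls out of rp_begin.pNext. A hypothetical application-side call looks like:
//
//     VkRenderPassAttachmentBeginInfo attachment_begin_info = {VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO};
//     attachment_begin_info.attachmentCount = 1;
//     attachment_begin_info.pAttachments = &image_view;  // hypothetical VkImageView owned by the application
//
//     VkRenderPassBeginInfo rp_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, &attachment_begin_info};
//     rp_begin.renderPass = render_pass;
//     rp_begin.framebuffer = framebuffer;  // created with VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT
//     vkCmdBeginRenderPass(command_buffer, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
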
#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = LvlFindInChain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->external_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
        // VUID 01894 will catch the case where the external format is not found in the map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            is_node->format_features = it->second;
        }
    }
}

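// Illustrative sketch (not part of the tracker): the external-format path above is taken when an application creates
// the image with VkExternalFormatANDROID chained into VkImageCreateInfo, e.g. (hypothetical application code):
//
//     VkExternalFormatANDROID external_format = {VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID};
//     external_format.externalFormat = ahb_format_props.externalFormat;  // from vkGetAndroidHardwareBufferPropertiesANDROID
//     VkExternalMemoryImageCreateInfo external_memory_info = {VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
//                                                             &external_format};
//     external_memory_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
//     VkImageCreateInfo image_ci = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, &external_memory_info};
//     image_ci.format = VK_FORMAT_UNDEFINED;  // required when a non-zero external format is used
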
void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
    const VkExternalMemoryBufferCreateInfo *embci = LvlFindInChain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
    if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        bs_node->external_ahb = true;
    }
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
    const VkExternalFormatANDROID *ext_format_android = LvlFindInChain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
        // VUID 01894 will catch the case where the external format is not found in the map
        auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            ycbcr_state->format_features = it->second;
        }
    }
};

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
};

void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto ahb_format_props = LvlFindInChain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
    if (ahb_format_props) {
        ahb_ext_formats_map.emplace(ahb_format_props->externalFormat, ahb_format_props->formatFeatures);
    }
}

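// Illustrative sketch (not part of the tracker): the externalFormat -> formatFeatures entries cached above come from
// an application query along these lines (hypothetical variables device and ahb):
//
//     VkAndroidHardwareBufferFormatPropertiesANDROID format_props = {
//         VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID};
//     VkAndroidHardwareBufferPropertiesANDROID properties = {VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
//                                                            &format_props};
//     vkGetAndroidHardwareBufferPropertiesANDROID(device, ahb, &properties);
//     // format_props.externalFormat and format_props.formatFeatures feed ahb_ext_formats_map
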
#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state){};

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};

#endif  // VK_USE_PLATFORM_ANDROID_KHR

std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
                                                                                     uint32_t set) {
    std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
    if (layout_data && (set < layout_data->set_layouts.size())) {
        dsl = layout_data->set_layouts[set];
    }
    return dsl;
}

void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
    // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
    // If the format is an AHB external format, the features have already been set
    if (image_state.has_ahb_format == false) {
        const VkImageTiling image_tiling = image_state.createInfo.tiling;
        const VkFormat image_format = image_state.createInfo.format;
        if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
            VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
                VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
            DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image(), &drm_format_properties);

            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            // First call gets the modifier count; second call fills the array sized from it
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
            drm_properties.resize(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);

            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
                    drm_format_properties.drmFormatModifier) {
                    image_state.format_features =
                        drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                    break;
                }
            }
        } else {
            VkFormatProperties format_properties;
            DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
            image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                   : format_properties.optimalTilingFeatures;
        }
    }
}

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = LvlFindInChain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // every multi-planar format has at least plane 0
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    AddImageStateProps(*is_node, device, physical_device);

    is_node->unprotected = ((pCreateInfo->flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);
    imageMap.emplace(*pImage, std::move(is_node));
}

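// Illustrative sketch (not part of the tracker): the per-plane query recorded above mirrors what an application does
// for a disjoint multi-planar image, e.g. for plane 1 (hypothetical variables device and image):
//
//     VkImagePlaneMemoryRequirementsInfo plane_info = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
//     plane_info.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
//     VkImageMemoryRequirementsInfo2 info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, &plane_info};
//     info.image = image;
//     VkMemoryRequirements2 reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
//     vkGetImageMemoryRequirements2(device, &info, &reqs);
//     // reqs.memoryRequirements corresponds to plane1_requirements recorded by the tracker
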
void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    // Clean up memory mapping, bindings and range references for image
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state);
        }
    }
    image_state->Destroy();
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        cb_node->AddChild(image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        cb_node->AddChild(image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
                                                           const VkCopyImageInfo2KHR *pCopyImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pCopyImageInfo->srcImage);
    auto dst_image_state = GetImageState(pCopyImageInfo->dstImage);

    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
                                                              const VkResolveImageInfo2KHR *pResolveImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pResolveImageInfo->srcImage);
    auto dst_image_state = GetImageState(pResolveImageInfo->dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
                                                           const VkBlitImageInfo2KHR *pBlitImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pBlitImageInfo->srcImage);
    auto dst_image_state = GetImageState(pBlitImageInfo->dstImage);

    // Update bindings between images and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO: This doesn't create a deep copy of pQueueFamilyIndices, so fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
    }
    // Record the memory requirements in case the app does not query them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    buffer_state->unprotected = ((pCreateInfo->flags & VK_BUFFER_CREATE_PROTECTED_BIT) == 0);

    bufferMap.emplace(*pBuffer, std::move(buffer_state));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    auto buffer_view_state = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);

    VkFormatProperties format_properties;
    DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);
    buffer_view_state->format_features = format_properties.bufferFeatures;

    bufferViewMap.emplace(*pView, std::move(buffer_view_state));
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);

    // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
    const VkImageTiling image_tiling = image_state->createInfo.tiling;
    const VkFormat image_view_format = pCreateInfo->format;
    if (image_state->has_ahb_format == true) {
        // The ImageView inherits the Image's format features since they share the same AHB
        image_view_state->format_features = image_state->format_features;
    } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
        assert(device_extensions.vk_ext_image_drm_format_modifier);
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                       nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image(), &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;

        // First call is to get the number of modifiers compatible with the queried format
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
        drm_properties.resize(drm_properties_list.drmFormatModifierCount);
        drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();

        // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
        // compatible with the queried format
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
                image_view_state->format_features |=
                    drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                break;
            }
        }
    } else {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
        image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                     : format_properties.optimalTilingFeatures;
    }

    auto usage_create_info = LvlFindInChain<VkImageViewUsageCreateInfo>(pCreateInfo->pNext);
    image_view_state->inherited_usage = (usage_create_info) ? usage_create_info->usage : image_state->createInfo.usage;

    // filter_cubic_props is used in CmdDraw validation, but querying it there would be too expensive, so cache it here
    image_view_state->filter_cubic_props = LvlInitStruct<VkFilterCubicImageViewImageFormatPropertiesEXT>();
    if (IsExtEnabled(device_extensions.vk_ext_filter_cubic)) {
        auto imageview_format_info = LvlInitStruct<VkPhysicalDeviceImageViewImageFormatInfoEXT>();
        imageview_format_info.imageViewType = pCreateInfo->viewType;
        auto image_format_info = LvlInitStruct<VkPhysicalDeviceImageFormatInfo2>(&imageview_format_info);
        image_format_info.type = image_state->createInfo.imageType;
        image_format_info.format = image_state->createInfo.format;
        image_format_info.tiling = image_state->createInfo.tiling;
        image_format_info.usage = image_view_state->inherited_usage;
        image_format_info.flags = image_state->createInfo.flags;

        auto image_format_properties = LvlInitStruct<VkImageFormatProperties2>(&image_view_state->filter_cubic_props);

        DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &image_format_info, &image_format_properties);
    }
    imageViewMap.emplace(*pView, std::move(image_view_state));
}

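// Illustrative sketch (not part of the tracker): inherited_usage recorded above differs from the image's usage only
// when the application narrows it with VkImageViewUsageCreateInfo, e.g. (hypothetical application code):
//
//     VkImageViewUsageCreateInfo usage_info = {VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO};
//     usage_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;  // a subset of the image's usage flags
//     VkImageViewCreateInfo view_ci = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, &usage_info};
//     // ... fill in image, viewType, format, components, subresourceRange ...
//     vkCreateImageView(device, &view_ci, nullptr, &view);
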
void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
                                                            const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(pCopyBufferInfos->srcBuffer);
    auto dst_buffer_state = GetBufferState(pCopyBufferInfos->dstBuffer);

    // Update bindings between buffers and cmd buffer
    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;

    // Any bound cmd buffers are now invalid
    image_view_state->Destroy();
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);

    buffer_state->Destroy();
    bufferMap.erase(buffer_state->buffer());
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);

    // Any bound cmd buffers are now invalid
    buffer_view_state->Destroy();
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    cb_node->AddChild(buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
                                                                   const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(pCopyImageToBufferInfo->srcImage);
    auto dst_buffer_state = GetBufferState(pCopyImageToBufferInfo->dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    cb_node->AddChild(src_image_state);
    cb_node->AddChild(dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
                                                                   const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
    if (disabled[command_buffer_state]) return;

    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(pCopyBufferToImageInfo->srcBuffer);
    auto dst_image_state = GetImageState(pCopyBufferToImageInfo->dstImage);

    cb_node->AddChild(src_buffer_state);
    cb_node->AddChild(dst_image_state);
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

void ValidationStateTracker::RemoveAliasingImages(const layer_data::unordered_set<IMAGE_STATE *> &bound_images) {
    // This is a one-way clear. Because bound_images holds the cross references, a single pass that clears each bound
    // image's aliasing_images removes the whole set of references; a second pass is not needed.
    for (auto *bound_image : bound_images) {
        if (bound_image) {
            bound_image->aliasing_images.clear();
        }
    }
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureStateNV(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

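// Illustrative sketch (not part of the tracker's public interface): callers wrap a raw handle in a VulkanTypedHandle
// and the template above dispatches on its type tag, e.g.:
//
//     const BINDABLE *binding = GetObjectMemBinding(VulkanTypedHandle(image, kVulkanObjectTypeImage));
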
void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo, fake_address);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = LvlFindInChain<VkMemoryDedicatedAllocateInfo>(pAllocateInfo->pNext);
    if (dedicated) {
        if (dedicated->buffer) {
            const auto *buffer_state = GetBufferState(dedicated->buffer);
            if (buffer_state) {
                mem_info->dedicated_handle = VulkanTypedHandle(dedicated->buffer, kVulkanObjectTypeBuffer);
                mem_info->dedicated_create_info.buffer = buffer_state->createInfo;
            }
        } else if (dedicated->image) {
            const auto *image_state = GetImageState(dedicated->image);
            if (image_state) {
                mem_info->dedicated_handle = VulkanTypedHandle(dedicated->image, kVulkanObjectTypeImage);
                mem_info->dedicated_create_info.image = image_state->createInfo;
            }
        }
    }
    auto export_info = LvlFindInChain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }

    auto alloc_flags = LvlFindInChain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
    if (alloc_flags) {
        auto dev_mask = alloc_flags->deviceMask;
        if ((dev_mask != 0) && (dev_mask & (dev_mask - 1))) {
            mem_info->multi_instance = true;
        }
    }
    auto heap_index = phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].heapIndex;
    mem_info->multi_instance |= (((phys_dev_mem_props.memoryHeaps[heap_index].flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT) != 0) &&
                                 physical_device_count > 1);

    // Assumes validation has already ensured there is only a single import operation in the pNext chain
#ifdef VK_USE_PLATFORM_WIN32_KHR
    auto win32_import = LvlFindInChain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
    if (win32_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = win32_import->handleType;
    }
#endif
    auto fd_import = LvlFindInChain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
    if (fd_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = fd_import->handleType;
    }
    auto host_pointer_import = LvlFindInChain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
    if (host_pointer_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = host_pointer_import->handleType;
    }
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    // An AHB import doesn't carry a handle type in its pNext struct;
    // all imported AHBs are assumed to use the same, single handleType
    auto ahb_import = LvlFindInChain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
    if ((ahb_import) && (ahb_import->buffer != nullptr)) {
        mem_info->is_import_ahb = true;
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
#endif  // VK_USE_PLATFORM_ANDROID_KHR

    const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    mem_info->unprotected = ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) == 0);
}

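// Illustrative sketch (not part of the tracker): the dedicated/export/import flags recorded above are driven purely by
// pNext structures on vkAllocateMemory, e.g. a dedicated image allocation (hypothetical application code):
//
//     VkMemoryDedicatedAllocateInfo dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
//     dedicated_info.image = image;  // or .buffer for a dedicated buffer allocation
//     VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, &dedicated_info};
//     alloc_info.allocationSize = mem_reqs.size;
//     alloc_info.memoryTypeIndex = memory_type_index;  // hypothetical index chosen from mem_reqs.memoryTypeBits
//     vkAllocateMemory(device, &alloc_info, nullptr, &memory);
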
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
                                             const char *function) {
    const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
    auto &state = cb_state->lastBound[lv_bind_point];
    PIPELINE_STATE *pipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pipe->active_slots) {
            uint32_t set_index = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[set_index].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[set_index].validated_set != descriptor_set ||
                state.per_set[set_index].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[set_index].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[set_index].validated_set_binding_req_map.begin(),
                                              state.per_set[set_index].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[set_index].validated_set_binding_req_map.begin(),
                                        state.per_set[set_index].validated_set_binding_req_map.end(),
                                        layer_data::insert_iterator<BindingReqMap>(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, delta_reqs, function);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pipe, binding_req_map, function);
                }

                state.per_set[set_index].validated_set = descriptor_set;
                state.per_set[set_index].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[set_index].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning; the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[set_index].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[set_index].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[set_index].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    if (!pipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

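// Illustrative sketch (not part of the tracker): the std::includes()/std::set_difference() logic above is the usual
// "skip when the previously validated bindings are a superset" pattern over sorted associative containers, conceptually:
//
//     const bool covered = std::includes(validated.begin(), validated.end(),  // bindings already validated/recorded
//                                        required.begin(), required.end());   // bindings this draw needs
//     if (!covered) {
//         std::set_difference(required.begin(), required.end(), validated.begin(), validated.end(),
//                             std::inserter(delta, delta.begin()));  // only the newly required bindings get recorded
//     }
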
// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    // Any bound cmd buffers are now invalid
    descriptor_set->Destroy();

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE: Calls to this function should be wrapped in a mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto *ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureStateNV(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureKHR: {
            base_ptr = GetAccelerationStructureStateKHR(object_struct.Cast<VkAccelerationStructureKHR>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

sfricke-samsungbf1a2ed2020-06-14 23:31:00 -0700923// Gets union of all features defined by Potential Format Features
924// except, does not handle the external format case for AHB as that only can be used for sampled images
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700925VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
926 VkFormatFeatureFlags format_features = 0;
927
928 if (format != VK_FORMAT_UNDEFINED) {
929 VkFormatProperties format_properties;
930 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
931 format_features |= format_properties.linearTilingFeatures;
932 format_features |= format_properties.optimalTilingFeatures;
933 if (device_extensions.vk_ext_image_drm_format_modifier) {
934 // VK_KHR_get_physical_device_properties2 is required in this case
935 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
936 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
937 nullptr};
938 format_properties_2.pNext = (void *)&drm_properties_list;
Marc Alcala Prieto773871c2021-02-04 19:24:43 +0100939
940 // First call is to get the number of modifiers compatible with the queried format
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700941 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
Marc Alcala Prieto773871c2021-02-04 19:24:43 +0100942
943 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
944 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
945 drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
946
947 // Second call, now with an allocated array in pDrmFormatModifierProperties, is to get the modifiers
948 // compatible with the queried format
949 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
950
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700951 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
952 format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
953 }
954 }
955 }
956
957 return format_features;
958}
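// Usage sketch (hypothetical caller, not part of this file): the returned mask is typically tested for a feature bit
// without caring which tiling (linear, optimal, or a DRM format modifier) provides it, e.g.:
//     const VkFormatFeatureFlags potential = GetPotentialFormatFeatures(create_info.format);
//     const bool maybe_sampled = (potential & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) != 0;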
959
locke-lunargd556cc32019-09-17 01:21:23 -0600960// Reset the command buffer state
961// Maintain the createInfo and set state to CB_NEW, but clear all other state
962void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700963 CMD_BUFFER_STATE *cb_state = GetCBState(cb);
964 if (cb_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600965 cb_state->Reset();
locke-lunargd556cc32019-09-17 01:21:23 -0600966 // Clean up the label data
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -0600967 ResetCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
locke-lunargd556cc32019-09-17 01:21:23 -0600968 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -0600969
locke-lunargd556cc32019-09-17 01:21:23 -0600970 if (command_buffer_reset_callback) {
971 (*command_buffer_reset_callback)(cb);
972 }
973}
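// Note: command_buffer_reset_callback (invoked above) is an optional hook that lets validation objects layered on this
// tracker clear their own per-command-buffer state whenever a command buffer is reset.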
974
975void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
976 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
977 VkResult result) {
978 if (VK_SUCCESS != result) return;
979
Locke Linf3873542021-04-26 11:25:10 -0600980 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
981 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
982 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
983
locke-lunargd556cc32019-09-17 01:21:23 -0600984 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
985 if (nullptr == enabled_features_found) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -0700986 const auto *features2 = LvlFindInChain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -0600987 if (features2) {
988 enabled_features_found = &(features2->features);
Locke Linf3873542021-04-26 11:25:10 -0600989
990            const auto *provoking_vertex_features = LvlFindInChain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(features2->pNext);
991 if (provoking_vertex_features) {
992 state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
993 }
locke-lunargd556cc32019-09-17 01:21:23 -0600994 }
995 }
996
locke-lunargd556cc32019-09-17 01:21:23 -0600997 if (nullptr == enabled_features_found) {
998 state_tracker->enabled_features.core = {};
999 } else {
1000 state_tracker->enabled_features.core = *enabled_features_found;
1001 }
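    // Illustrative note: an application provides features either through pCreateInfo->pEnabledFeatures or by chaining a
    // VkPhysicalDeviceFeatures2 on the pNext chain (the two forms are mutually exclusive). A hypothetical app-side sketch:
    //     VkPhysicalDeviceFeatures2 features2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2};
    //     features2.features.samplerAnisotropy = VK_TRUE;
    //     VkDeviceCreateInfo create_info = {VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO};
    //     create_info.pNext = &features2;  // pEnabledFeatures stays NULL in this form
    // The lookup above accepts either form, so enabled_features.core is populated in both cases.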
1002
1003 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
1004    // previously queried them through an explicit API call.
1005 uint32_t count;
1006 auto pd_state = GetPhysicalDeviceState(gpu);
1007 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
1008 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
1009 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
1010 // Save local link to this device's physical device state
1011 state_tracker->physical_device_state = pd_state;
1012
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001013 const auto *vulkan_12_features = LvlFindInChain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001014 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001015 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001016 } else {
sfricke-samsung27c70722020-05-02 08:42:39 -07001017 // Set Extension Feature Aliases to false as there is no struct to check
1018 state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
1019 state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
1020 state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
1021 state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
1022 state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
1023 state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001024 state_tracker->enabled_features.core12.subgroupBroadcastDynamicId = VK_FALSE;
sfricke-samsung27c70722020-05-02 08:42:39 -07001025
1026 // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001027
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001028 const auto *eight_bit_storage_features = LvlFindInChain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001029 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001030 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1031 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1032 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1033 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001034 }
1035
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001036 const auto *float16_int8_features = LvlFindInChain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001037 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001038 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1039 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001040 }
1041
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001042 const auto *descriptor_indexing_features = LvlFindInChain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001043 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001044 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1045 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1046 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1047 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1048 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1049 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1050 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1051 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1052 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1053 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1054 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1055 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1056 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1057 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1058 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1059 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1060 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1061 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1062 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1063 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1064 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1065 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1066 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1067 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1068 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1069 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1070 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1071 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
1072 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
1073 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1074 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1075 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1076 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1077 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1078 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1079 descriptor_indexing_features->descriptorBindingPartiallyBound;
1080 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1081 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1082 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001083 }
1084
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001085 const auto *scalar_block_layout_features = LvlFindInChain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001086 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001087 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001088 }
1089
1090 const auto *imageless_framebuffer_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001091 LvlFindInChain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001092 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001093 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001094 }
1095
1096 const auto *uniform_buffer_standard_layout_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001097 LvlFindInChain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001098 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001099 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1100 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001101 }
1102
1103 const auto *subgroup_extended_types_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001104 LvlFindInChain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001105 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001106 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1107 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001108 }
1109
1110 const auto *separate_depth_stencil_layouts_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001111 LvlFindInChain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001112 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001113 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1114 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001115 }
1116
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001117 const auto *host_query_reset_features = LvlFindInChain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001118 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001119 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001120 }
1121
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001122 const auto *timeline_semaphore_features = LvlFindInChain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001123 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001124 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001125 }
1126
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001127 const auto *buffer_device_address = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001128 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001129 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1130 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1131 buffer_device_address->bufferDeviceAddressCaptureReplay;
1132 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1133 buffer_device_address->bufferDeviceAddressMultiDevice;
1134 }
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001135
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001136 const auto *atomic_int64_features = LvlFindInChain<VkPhysicalDeviceShaderAtomicInt64Features>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001137 if (atomic_int64_features) {
1138 state_tracker->enabled_features.core12.shaderBufferInt64Atomics = atomic_int64_features->shaderBufferInt64Atomics;
1139 state_tracker->enabled_features.core12.shaderSharedInt64Atomics = atomic_int64_features->shaderSharedInt64Atomics;
1140 }
1141
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001142 const auto *memory_model_features = LvlFindInChain<VkPhysicalDeviceVulkanMemoryModelFeatures>(pCreateInfo->pNext);
sfricke-samsunga4143ac2020-12-18 00:00:53 -08001143 if (memory_model_features) {
1144 state_tracker->enabled_features.core12.vulkanMemoryModel = memory_model_features->vulkanMemoryModel;
1145 state_tracker->enabled_features.core12.vulkanMemoryModelDeviceScope =
1146 memory_model_features->vulkanMemoryModelDeviceScope;
1147 state_tracker->enabled_features.core12.vulkanMemoryModelAvailabilityVisibilityChains =
1148 memory_model_features->vulkanMemoryModelAvailabilityVisibilityChains;
1149 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001150 }
1151
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001152 const auto *vulkan_11_features = LvlFindInChain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001153 if (vulkan_11_features) {
1154 state_tracker->enabled_features.core11 = *vulkan_11_features;
1155 } else {
1156        // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan11Features
1157
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001158 const auto *sixteen_bit_storage_features = LvlFindInChain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001159 if (sixteen_bit_storage_features) {
1160 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1161 sixteen_bit_storage_features->storageBuffer16BitAccess;
1162 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1163 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1164 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1165 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1166 }
1167
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001168 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001169 if (multiview_features) {
1170 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1171 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1172 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1173 }
1174
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001175 const auto *variable_pointers_features = LvlFindInChain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001176 if (variable_pointers_features) {
1177 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1178 variable_pointers_features->variablePointersStorageBuffer;
1179 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1180 }
1181
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001182 const auto *protected_memory_features = LvlFindInChain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001183 if (protected_memory_features) {
1184 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1185 }
1186
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001187 const auto *ycbcr_conversion_features = LvlFindInChain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001188 if (ycbcr_conversion_features) {
1189 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1190 }
1191
1192 const auto *shader_draw_parameters_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001193 LvlFindInChain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001194 if (shader_draw_parameters_features) {
1195 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001196 }
1197 }
1198
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001199 const auto *device_group_ci = LvlFindInChain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001200 if (device_group_ci) {
1201 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1202 state_tracker->device_group_create_info = *device_group_ci;
1203 } else {
1204 state_tracker->physical_device_count = 1;
1205 }
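    // Each block below snapshots one extension feature structure found in the pCreateInfo->pNext chain into
    // enabled_features, so later validation can query what was enabled without re-walking the chain.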
locke-lunargd556cc32019-09-17 01:21:23 -06001206
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001207 const auto *exclusive_scissor_features = LvlFindInChain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001208 if (exclusive_scissor_features) {
1209 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1210 }
1211
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001212 const auto *shading_rate_image_features = LvlFindInChain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001213 if (shading_rate_image_features) {
1214 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1215 }
1216
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001217 const auto *mesh_shader_features = LvlFindInChain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001218 if (mesh_shader_features) {
1219 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1220 }
1221
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001222 const auto *inline_uniform_block_features = LvlFindInChain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001223 if (inline_uniform_block_features) {
1224 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1225 }
1226
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001227 const auto *transform_feedback_features = LvlFindInChain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001228 if (transform_feedback_features) {
1229 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1230 }
1231
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001232 const auto *vtx_attrib_div_features = LvlFindInChain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001233 if (vtx_attrib_div_features) {
1234 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1235 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001236
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001237 const auto *buffer_device_address_ext = LvlFindInChain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001238 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001239 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001240 }
1241
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001242 const auto *cooperative_matrix_features = LvlFindInChain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001243 if (cooperative_matrix_features) {
1244 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1245 }
1246
locke-lunargd556cc32019-09-17 01:21:23 -06001247 const auto *compute_shader_derivatives_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001248 LvlFindInChain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001249 if (compute_shader_derivatives_features) {
1250 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1251 }
1252
1253 const auto *fragment_shader_barycentric_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001254 LvlFindInChain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001255 if (fragment_shader_barycentric_features) {
1256 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1257 }
1258
1259 const auto *shader_image_footprint_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001260 LvlFindInChain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001261 if (shader_image_footprint_features) {
1262 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1263 }
1264
1265 const auto *fragment_shader_interlock_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001266 LvlFindInChain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001267 if (fragment_shader_interlock_features) {
1268 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1269 }
1270
1271 const auto *demote_to_helper_invocation_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001272 LvlFindInChain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001273 if (demote_to_helper_invocation_features) {
1274 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1275 }
1276
1277 const auto *texel_buffer_alignment_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001278 LvlFindInChain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001279 if (texel_buffer_alignment_features) {
1280 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1281 }
1282
locke-lunargd556cc32019-09-17 01:21:23 -06001283 const auto *pipeline_exe_props_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001284 LvlFindInChain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001285 if (pipeline_exe_props_features) {
1286 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1287 }
1288
Jeff Bolz82f854d2019-09-17 14:56:47 -05001289 const auto *dedicated_allocation_image_aliasing_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001290 LvlFindInChain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
Jeff Bolz82f854d2019-09-17 14:56:47 -05001291 if (dedicated_allocation_image_aliasing_features) {
1292 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1293 *dedicated_allocation_image_aliasing_features;
1294 }
1295
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001296 const auto *performance_query_features = LvlFindInChain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001297 if (performance_query_features) {
1298 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1299 }
1300
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001301 const auto *device_coherent_memory_features = LvlFindInChain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
Tobias Hector782bcde2019-11-28 16:19:42 +00001302 if (device_coherent_memory_features) {
1303 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1304 }
1305
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001306 const auto *ycbcr_image_array_features = LvlFindInChain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungcead0802020-01-30 22:20:10 -08001307 if (ycbcr_image_array_features) {
1308 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1309 }
1310
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001311 const auto *ray_query_features = LvlFindInChain<VkPhysicalDeviceRayQueryFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001312 if (ray_query_features) {
1313 state_tracker->enabled_features.ray_query_features = *ray_query_features;
1314 }
1315
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001316 const auto *ray_tracing_pipeline_features = LvlFindInChain<VkPhysicalDeviceRayTracingPipelineFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001317 if (ray_tracing_pipeline_features) {
1318 state_tracker->enabled_features.ray_tracing_pipeline_features = *ray_tracing_pipeline_features;
1319 }
1320
1321 const auto *ray_tracing_acceleration_structure_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001322 LvlFindInChain<VkPhysicalDeviceAccelerationStructureFeaturesKHR>(pCreateInfo->pNext);
sourav parmarcd5fb182020-07-17 12:58:44 -07001323 if (ray_tracing_acceleration_structure_features) {
1324 state_tracker->enabled_features.ray_tracing_acceleration_structure_features = *ray_tracing_acceleration_structure_features;
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001325 }
1326
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001327 const auto *robustness2_features = LvlFindInChain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
Jeff Bolz165818a2020-05-08 11:19:03 -05001328 if (robustness2_features) {
1329 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1330 }
1331
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001332 const auto *fragment_density_map_features = LvlFindInChain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001333 if (fragment_density_map_features) {
1334 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1335 }
1336
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001337 const auto *fragment_density_map_features2 = LvlFindInChain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001338 if (fragment_density_map_features2) {
1339 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1340 }
1341
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001342 const auto *astc_decode_features = LvlFindInChain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001343 if (astc_decode_features) {
1344 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1345 }
1346
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001347 const auto *custom_border_color_features = LvlFindInChain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
Tony-LunarG7337b312020-04-15 16:40:25 -06001348 if (custom_border_color_features) {
1349 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1350 }
1351
sfricke-samsungfd661d62020-05-16 00:57:27 -07001352 const auto *pipeline_creation_cache_control_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001353 LvlFindInChain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsungfd661d62020-05-16 00:57:27 -07001354 if (pipeline_creation_cache_control_features) {
1355 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1356 }
1357
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001358 const auto *fragment_shading_rate_features = LvlFindInChain<VkPhysicalDeviceFragmentShadingRateFeaturesKHR>(pCreateInfo->pNext);
Tobias Hector6663c9b2020-11-05 10:18:02 +00001359 if (fragment_shading_rate_features) {
1360 state_tracker->enabled_features.fragment_shading_rate_features = *fragment_shading_rate_features;
1361 }
1362
Piers Daniell39842ee2020-07-10 16:42:33 -06001363 const auto *extended_dynamic_state_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001364 LvlFindInChain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
Piers Daniell39842ee2020-07-10 16:42:33 -06001365 if (extended_dynamic_state_features) {
1366 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1367 }
1368
Vikram Kushwahaa57b0c32021-04-19 12:21:46 -07001369 const auto *extended_dynamic_state2_features =
1370 LvlFindInChain<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT>(pCreateInfo->pNext);
1371 if (extended_dynamic_state2_features) {
1372 state_tracker->enabled_features.extended_dynamic_state2_features = *extended_dynamic_state2_features;
1373 }
1374
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001375 const auto *multiview_features = LvlFindInChain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001376 if (multiview_features) {
1377 state_tracker->enabled_features.multiview_features = *multiview_features;
1378 }
1379
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001380 const auto *portability_features = LvlFindInChain<VkPhysicalDevicePortabilitySubsetFeaturesKHR>(pCreateInfo->pNext);
Nathaniel Cesariob3f2d702020-11-09 09:20:49 -07001381 if (portability_features) {
1382 state_tracker->enabled_features.portability_subset_features = *portability_features;
1383 }
1384
sfricke-samsung0065ce02020-12-03 22:46:37 -08001385 const auto *shader_integer_functions2_features =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001386 LvlFindInChain<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001387 if (shader_integer_functions2_features) {
1388 state_tracker->enabled_features.shader_integer_functions2_features = *shader_integer_functions2_features;
1389 }
1390
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001391 const auto *shader_sm_builtins_feature = LvlFindInChain<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001392 if (shader_sm_builtins_feature) {
1393 state_tracker->enabled_features.shader_sm_builtins_feature = *shader_sm_builtins_feature;
1394 }
1395
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001396 const auto *shader_atomic_float_feature = LvlFindInChain<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001397 if (shader_atomic_float_feature) {
1398 state_tracker->enabled_features.shader_atomic_float_feature = *shader_atomic_float_feature;
1399 }
1400
1401 const auto *shader_image_atomic_int64_feature =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001402 LvlFindInChain<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT>(pCreateInfo->pNext);
sfricke-samsung0065ce02020-12-03 22:46:37 -08001403 if (shader_image_atomic_int64_feature) {
1404 state_tracker->enabled_features.shader_image_atomic_int64_feature = *shader_image_atomic_int64_feature;
1405 }
1406
sfricke-samsung486a51e2021-01-02 00:10:15 -08001407 const auto *shader_clock_feature = LvlFindInChain<VkPhysicalDeviceShaderClockFeaturesKHR>(pCreateInfo->pNext);
1408 if (shader_clock_feature) {
1409 state_tracker->enabled_features.shader_clock_feature = *shader_clock_feature;
1410 }
1411
Jeremy Gebben5f585ae2021-02-02 09:03:06 -07001412 const auto *conditional_rendering_features =
1413 LvlFindInChain<VkPhysicalDeviceConditionalRenderingFeaturesEXT>(pCreateInfo->pNext);
1414 if (conditional_rendering_features) {
1415 state_tracker->enabled_features.conditional_rendering = *conditional_rendering_features;
1416 }
1417
Shannon McPhersondb287d42021-02-02 15:27:32 -07001418 const auto *workgroup_memory_explicit_layout_features =
1419 LvlFindInChain<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>(pCreateInfo->pNext);
1420 if (workgroup_memory_explicit_layout_features) {
1421 state_tracker->enabled_features.workgroup_memory_explicit_layout_features = *workgroup_memory_explicit_layout_features;
1422 }
1423
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001424 const auto *synchronization2_features =
1425 LvlFindInChain<VkPhysicalDeviceSynchronization2FeaturesKHR>(pCreateInfo->pNext);
1426 if (synchronization2_features) {
1427 state_tracker->enabled_features.synchronization2_features = *synchronization2_features;
1428 }
1429
Locke Linf3873542021-04-26 11:25:10 -06001430    const auto *provoking_vertex_features = LvlFindInChain<VkPhysicalDeviceProvokingVertexFeaturesEXT>(pCreateInfo->pNext);
1431 if (provoking_vertex_features) {
1432 state_tracker->enabled_features.provoking_vertex_features = *provoking_vertex_features;
1433 }
1434
Piers Daniellcb6d8032021-04-19 18:51:26 -06001435 const auto *vertex_input_dynamic_state_features =
1436 LvlFindInChain<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1437 if (vertex_input_dynamic_state_features) {
1438 state_tracker->enabled_features.vertex_input_dynamic_state_features = *vertex_input_dynamic_state_features;
1439 }
1440
David Zhao Akeley44139b12021-04-26 16:16:13 -07001441 const auto *inherited_viewport_scissor_features =
1442 LvlFindInChain<VkPhysicalDeviceInheritedViewportScissorFeaturesNV>(pCreateInfo->pNext);
1443 if (inherited_viewport_scissor_features) {
1444 state_tracker->enabled_features.inherited_viewport_scissor_features = *inherited_viewport_scissor_features;
1445 }
1446
locke-lunargd556cc32019-09-17 01:21:23 -06001447    // Store physical device properties and physical device memory limits into the state tracker structs
1448 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1449 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001450 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1451 &state_tracker->phys_dev_props_core11);
1452 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1453 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001454
1455 const auto &dev_ext = state_tracker->device_extensions;
1456 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1457
1458 if (dev_ext.vk_khr_push_descriptor) {
1459 // Get the needed push_descriptor limits
1460 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1461 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1462 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1463 }
1464
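    // When an extension is enabled on a device that does not expose core Vulkan 1.2, copy the extension properties into
    // phys_dev_props_core12 so downstream checks only need to consult the core12 struct.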
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001465 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001466 VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing_prop;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001467 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1468 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1469 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1470 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1471 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1472 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1473 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1474 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1475 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1476 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1477 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1478 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1479 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1480 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1481 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1482 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1483 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1484 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1485 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1486 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1487 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1488 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1489 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1490 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1491 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1492 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1493 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1494 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1495 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1496 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1497 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1498 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1499 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1500 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1501 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1502 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1503 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1504 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1505 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1506 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
1507 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
1508 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
1509 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
1510 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
1511 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
1512 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
1513 }
1514
locke-lunargd556cc32019-09-17 01:21:23 -06001515 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1516 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1517 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1518 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001519
1520 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001521 VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001522 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
1523 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
1524 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
1525 depth_stencil_resolve_props.supportedStencilResolveModes;
1526 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
1527 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
1528 }
1529
locke-lunargd556cc32019-09-17 01:21:23 -06001530 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001531 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
sourav parmarcd5fb182020-07-17 12:58:44 -07001532 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing_pipeline, &phys_dev_props->ray_tracing_propsKHR);
1533 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_acceleration_structure, &phys_dev_props->acc_structure_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001534 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1535 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001536 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001537 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07001538 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06001539 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001540 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_multiview, &phys_dev_props->multiview_props);
Nathaniel Cesario3291c912020-11-17 16:54:41 -07001541 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_portability_subset, &phys_dev_props->portability_props);
Locke Lin016d8482021-05-27 12:11:31 -06001542 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_provoking_vertex, &phys_dev_props->provoking_vertex_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001543
1544 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001545 VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001546 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
1547 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
1548 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
1549 }
1550
1551 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08001552 VkPhysicalDeviceFloatControlsProperties float_controls_props;
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001553 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
1554 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
1555 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
1556 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
1557 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
1558 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
1559 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
1560 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
1561 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
1562 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
1563 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
1564 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
1565 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
1566 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
1567 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
1568 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
1569 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
1570 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
1571 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
1572 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
1573 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
1574 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001575
locke-lunargd556cc32019-09-17 01:21:23 -06001576 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1577 // Get the needed cooperative_matrix properties
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001578 auto cooperative_matrix_props = LvlInitStruct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1579 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&cooperative_matrix_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001580 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1581 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1582
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001583 uint32_t num_cooperative_matrix_properties = 0;
1584 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties, NULL);
1585 state_tracker->cooperative_matrix_properties.resize(num_cooperative_matrix_properties,
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001586 LvlInitStruct<VkCooperativeMatrixPropertiesNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06001587
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001588 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &num_cooperative_matrix_properties,
locke-lunargd556cc32019-09-17 01:21:23 -06001589 state_tracker->cooperative_matrix_properties.data());
1590 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001591 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06001592 // Get the needed subgroup limits
Locke Lin016d8482021-05-27 12:11:31 -06001593 auto subgroup_prop = LvlInitStruct<VkPhysicalDeviceSubgroupProperties>();
Mark Lobodzinski6fe9e702020-12-30 15:36:39 -07001594 auto prop2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&subgroup_prop);
locke-lunargd556cc32019-09-17 01:21:23 -06001595 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1596
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001597 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
1598 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
1599 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
1600 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06001601 }
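    // Note: for Vulkan 1.2-capable devices the equivalent subgroup limits were already captured above through the
    // promoted properties query into phys_dev_props_core11.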
1602
Tobias Hector6663c9b2020-11-05 10:18:02 +00001603 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_fragment_shading_rate, &phys_dev_props->fragment_shading_rate_props);
1604
locke-lunargd556cc32019-09-17 01:21:23 -06001605 // Store queue family data
1606 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1607 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001608 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
sfricke-samsungb585ec12021-05-06 03:10:13 -07001609 state_tracker->queue_family_index_set.insert(queue_create_info.queueFamilyIndex);
1610 state_tracker->device_queue_info_list.push_back(
1611 {i, queue_create_info.queueFamilyIndex, queue_create_info.flags, queue_create_info.queueCount});
locke-lunargd556cc32019-09-17 01:21:23 -06001612 }
1613 }
1614}
1615
1616void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
1617 if (!device) return;
1618
locke-lunargd556cc32019-09-17 01:21:23 -06001619 // Reset all command buffers before destroying them, to unlink object_bindings.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001620 for (auto &command_buffer : commandBufferMap) {
1621 ResetCommandBufferState(command_buffer.first);
locke-lunargd556cc32019-09-17 01:21:23 -06001622 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001623 pipelineMap.clear();
1624 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001625 commandBufferMap.clear();
1626
1627 // This will also delete all sets in the pool & remove them from setMap
1628 DeleteDescriptorSetPools();
1629 // All sets should be removed
1630 assert(setMap.empty());
1631 descriptorSetLayoutMap.clear();
1632 imageViewMap.clear();
1633 imageMap.clear();
1634 bufferViewMap.clear();
1635 bufferMap.clear();
1636 // Queues persist until device is destroyed
1637 queueMap.clear();
1638}
1639
locke-lunargd556cc32019-09-17 01:21:23 -06001640// Track which resources are in-flight by atomically incrementing their "in_use" count
1641void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1642 cb_node->submitCount++;
locke-lunargd556cc32019-09-17 01:21:23 -06001643
locke-lunargd556cc32019-09-17 01:21:23 -06001644 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
1645 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
1646 // should then be flagged prior to calling this function
1647 for (auto event : cb_node->writeEventsBeforeWait) {
1648 auto event_state = GetEventState(event);
1649 if (event_state) event_state->write_in_use++;
1650 }
1651}
1652
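// Retire all work on pQueue up to and including sequence number seq: release the in-use counts taken at submit time
// (semaphores, events, primary command buffers), resolve recorded query states, mark the submission's fence as retired,
// and then roll forward any other queues or timeline semaphores whose signals this work waited on.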
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001653void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
Jeremy Gebbencbf22862021-03-03 12:01:22 -07001654 layer_data::unordered_map<VkQueue, uint64_t> other_queue_seqs;
1655 layer_data::unordered_map<VkSemaphore, uint64_t> timeline_semaphore_counters;
locke-lunargd556cc32019-09-17 01:21:23 -06001656
1657 // Roll this queue forward, one submission at a time.
1658 while (pQueue->seq < seq) {
1659 auto &submission = pQueue->submissions.front();
1660
1661 for (auto &wait : submission.waitSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001662 auto semaphore_state = GetSemaphoreState(wait.semaphore);
1663 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001664 semaphore_state->EndUse();
locke-lunargd556cc32019-09-17 01:21:23 -06001665 }
Mike Schuchardt2df08912020-12-15 16:28:09 -08001666 if (wait.type == VK_SEMAPHORE_TYPE_TIMELINE) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001667 auto &last_counter = timeline_semaphore_counters[wait.semaphore];
1668 last_counter = std::max(last_counter, wait.payload);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001669 } else {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001670 auto &last_seq = other_queue_seqs[wait.queue];
1671 last_seq = std::max(last_seq, wait.seq);
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001672 }
locke-lunargd556cc32019-09-17 01:21:23 -06001673 }
1674
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001675 for (auto &signal : submission.signalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001676 auto semaphore_state = GetSemaphoreState(signal.semaphore);
1677 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001678 semaphore_state->EndUse();
Mike Schuchardt2df08912020-12-15 16:28:09 -08001679 if (semaphore_state->type == VK_SEMAPHORE_TYPE_TIMELINE && semaphore_state->payload < signal.payload) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001680 semaphore_state->payload = signal.payload;
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001681 }
locke-lunargd556cc32019-09-17 01:21:23 -06001682 }
1683 }
1684
1685 for (auto &semaphore : submission.externalSemaphores) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001686 auto semaphore_state = GetSemaphoreState(semaphore);
1687 if (semaphore_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001688 semaphore_state->EndUse();
locke-lunargd556cc32019-09-17 01:21:23 -06001689 }
1690 }
1691
1692 for (auto cb : submission.cbs) {
1693 auto cb_node = GetCBState(cb);
1694 if (!cb_node) {
1695 continue;
1696 }
1697 // First perform decrement on general case bound objects
locke-lunargd556cc32019-09-17 01:21:23 -06001698 for (auto event : cb_node->writeEventsBeforeWait) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001699 auto event_node = eventMap.find(event);
1700 if (event_node != eventMap.end()) {
John Zulauf48057322020-12-02 11:59:31 -07001701 event_node->second->write_in_use--;
locke-lunargd556cc32019-09-17 01:21:23 -06001702 }
1703 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001704 QueryMap local_query_to_state_map;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001705 VkQueryPool first_pool = VK_NULL_HANDLE;
Jeff Bolz310775c2019-10-09 00:46:33 -05001706 for (auto &function : cb_node->queryUpdates) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001707 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
Jeff Bolz310775c2019-10-09 00:46:33 -05001708 }
1709
John Zulauf79f06582021-02-27 18:38:39 -07001710 for (const auto &query_state_pair : local_query_to_state_map) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001711 if (query_state_pair.second == QUERYSTATE_ENDED) {
1712 queryToStateMap[query_state_pair.first] = QUERYSTATE_AVAILABLE;
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001713 }
locke-lunargd556cc32019-09-17 01:21:23 -06001714 }
Jeremy Gebben5570abe2021-05-16 18:35:13 -06001715 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
1716 cb_node->EndUse();
1717 }
locke-lunargd556cc32019-09-17 01:21:23 -06001718 }
1719
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001720 auto fence_state = GetFenceState(submission.fence);
1721 if (fence_state && fence_state->scope == kSyncScopeInternal) {
1722 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06001723 }
1724
1725 pQueue->submissions.pop_front();
1726 pQueue->seq++;
1727 }
1728
1729 // Roll other queues forward to the highest seq we saw a wait for
John Zulauf79f06582021-02-27 18:38:39 -07001730 for (const auto &qs : other_queue_seqs) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001731 RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001732 }
John Zulauf79f06582021-02-27 18:38:39 -07001733 for (const auto &sc : timeline_semaphore_counters) {
Marshall Drew-Brook03847582020-11-06 15:10:45 -08001734 RetireTimelineSemaphore(sc.first, sc.second);
1735 }
locke-lunargd556cc32019-09-17 01:21:23 -06001736}
1737
1738// Submit a fence to a queue, delimiting previous fences and previous untracked
1739// work by it.
1740static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1741 pFence->state = FENCE_INFLIGHT;
1742 pFence->signaler.first = pQueue->queue;
1743 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1744}
1745
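// Record the fence (if any) for a queue submission. Returns a sequence number to retire early
// when the fence has external scope (its wait will never be observed here), otherwise 0.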
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001746uint64_t ValidationStateTracker::RecordSubmitFence(QUEUE_STATE *queue_state, VkFence fence, uint32_t submit_count) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001747 auto fence_state = GetFenceState(fence);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001748 uint64_t early_retire_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001749 if (fence_state) {
1750 if (fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06001751 // Mark fence in use
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001752 SubmitFence(queue_state, fence_state, std::max(1u, submit_count));
1753 if (!submit_count) {
locke-lunargd556cc32019-09-17 01:21:23 -06001754 // If no submissions, but just dropping a fence on the end of the queue,
1755 // record an empty submission with just the fence, so we can determine
1756 // its completion.
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001757 CB_SUBMISSION submission;
1758 submission.fence = fence;
1759 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001760 }
1761 } else {
 1762            // Retire work up until this fence early; we will not see the wait that corresponds to this signal
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001763 early_retire_seq = queue_state->seq + queue_state->submissions.size();
locke-lunargd556cc32019-09-17 01:21:23 -06001764 }
1765 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001766 return early_retire_seq;
1767}
1768
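// Add a command buffer and its linked secondaries to the submission, bump their in-use and
// resource counts, and replay the deferred query/event updates into the global state maps.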
1769void ValidationStateTracker::RecordSubmitCommandBuffer(CB_SUBMISSION &submission, VkCommandBuffer command_buffer) {
1770 auto cb_node = GetCBState(command_buffer);
1771 if (cb_node) {
1772 submission.cbs.push_back(command_buffer);
John Zulauf79f06582021-02-27 18:38:39 -07001773 for (auto *secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06001774 submission.cbs.push_back(secondary_cmd_buffer->commandBuffer());
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001775 IncrementResources(secondary_cmd_buffer);
1776 }
1777 IncrementResources(cb_node);
Jeremy Gebben5570abe2021-05-16 18:35:13 -06001778 // increment use count for all bound objects including secondary cbs
1779 cb_node->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001780
1781 VkQueryPool first_pool = VK_NULL_HANDLE;
1782 EventToStageMap local_event_to_stage_map;
1783 QueryMap local_query_to_state_map;
1784 for (auto &function : cb_node->queryUpdates) {
1785 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &local_query_to_state_map);
1786 }
1787
John Zulauf79f06582021-02-27 18:38:39 -07001788 for (const auto &query_state_pair : local_query_to_state_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001789 queryToStateMap[query_state_pair.first] = query_state_pair.second;
1790 }
1791
John Zulauf79f06582021-02-27 18:38:39 -07001792 for (const auto &function : cb_node->eventUpdates) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001793 function(nullptr, /*do_validate*/ false, &local_event_to_stage_map);
1794 }
1795
John Zulauf79f06582021-02-27 18:38:39 -07001796 for (const auto &eventStagePair : local_event_to_stage_map) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001797 eventMap[eventStagePair.first]->stageMask = eventStagePair.second;
1798 }
1799 }
1800}
1801
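// Record a semaphore wait on this submission. Binary semaphores wait on their signaling
// queue/seq; timeline semaphores wait on this queue's next_seq at the given value; semaphores
// with external scope are tracked separately, and temporary imports revert to internal scope.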
1802void ValidationStateTracker::RecordSubmitWaitSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
1803 uint64_t value, uint64_t next_seq) {
1804 auto semaphore_state = GetSemaphoreState(semaphore);
1805 if (semaphore_state) {
1806 if (semaphore_state->scope == kSyncScopeInternal) {
1807 SEMAPHORE_WAIT wait;
1808 wait.semaphore = semaphore;
1809 wait.type = semaphore_state->type;
1810 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
1811 if (semaphore_state->signaler.first != VK_NULL_HANDLE) {
1812 wait.queue = semaphore_state->signaler.first;
1813 wait.seq = semaphore_state->signaler.second;
1814 submission.waitSemaphores.emplace_back(std::move(wait));
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001815 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001816 }
1817 semaphore_state->signaler.first = VK_NULL_HANDLE;
1818 semaphore_state->signaled = false;
1819 } else if (semaphore_state->payload < value) {
1820 wait.queue = queue;
1821 wait.seq = next_seq;
1822 wait.payload = value;
1823 submission.waitSemaphores.emplace_back(std::move(wait));
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001824 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001825 }
1826 } else {
1827 submission.externalSemaphores.push_back(semaphore);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001828 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001829 if (semaphore_state->scope == kSyncScopeExternalTemporary) {
1830 semaphore_state->scope = kSyncScopeInternal;
1831 }
1832 }
1833 }
1834}
1835
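// Record a semaphore signal on this submission. Returns true when the semaphore has external
// scope, meaning work up to this submit should be retired early because the matching wait
// will not be seen.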
1836bool ValidationStateTracker::RecordSubmitSignalSemaphore(CB_SUBMISSION &submission, VkQueue queue, VkSemaphore semaphore,
1837 uint64_t value, uint64_t next_seq) {
1838 bool retire_early = false;
1839 auto semaphore_state = GetSemaphoreState(semaphore);
1840 if (semaphore_state) {
1841 if (semaphore_state->scope == kSyncScopeInternal) {
1842 SEMAPHORE_SIGNAL signal;
1843 signal.semaphore = semaphore;
1844 signal.seq = next_seq;
1845 if (semaphore_state->type == VK_SEMAPHORE_TYPE_BINARY) {
1846 semaphore_state->signaler.first = queue;
1847 semaphore_state->signaler.second = next_seq;
1848 semaphore_state->signaled = true;
1849 } else {
1850 signal.payload = value;
1851 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001852 semaphore_state->BeginUse();
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001853 submission.signalSemaphores.emplace_back(std::move(signal));
1854 } else {
1855 // Retire work up until this submit early, we will not see the wait that corresponds to this signal
 1856            // Retire work up until this submit early; we will not see the wait that corresponds to this signal
1857 }
1858 }
1859 return retire_early;
1860}
1861
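// Build one CB_SUBMISSION per VkSubmitInfo (wait/signal semaphores, command buffers, and the
// fence on the last submit) and queue it, retiring work early where signals cross an external
// scope boundary.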
1862void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
1863 VkFence fence, VkResult result) {
1864 if (result != VK_SUCCESS) return;
1865 auto queue_state = GetQueueState(queue);
1866
1867 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);
locke-lunargd556cc32019-09-17 01:21:23 -06001868
1869 // Now process each individual submit
1870 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001871 CB_SUBMISSION submission;
locke-lunargd556cc32019-09-17 01:21:23 -06001872 const VkSubmitInfo *submit = &pSubmits[submit_idx];
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001873 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001874 auto *timeline_semaphore_submit = LvlFindInChain<VkTimelineSemaphoreSubmitInfo>(submit->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001875 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
Jeremy Gebben4ae5cc72021-03-10 15:34:44 -07001876 uint64_t value = 0;
1877 if (timeline_semaphore_submit && timeline_semaphore_submit->pWaitSemaphoreValues != nullptr &&
1878 (i < timeline_semaphore_submit->waitSemaphoreValueCount)) {
1879 value = timeline_semaphore_submit->pWaitSemaphoreValues[i];
1880 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001881 RecordSubmitWaitSemaphore(submission, queue, submit->pWaitSemaphores[i], value, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001882 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001883
1884 bool retire_early = false;
locke-lunargd556cc32019-09-17 01:21:23 -06001885 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
Jeremy Gebben4ae5cc72021-03-10 15:34:44 -07001886 uint64_t value = 0;
1887 if (timeline_semaphore_submit && timeline_semaphore_submit->pSignalSemaphoreValues != nullptr &&
1888 (i < timeline_semaphore_submit->signalSemaphoreValueCount)) {
1889 value = timeline_semaphore_submit->pSignalSemaphoreValues[i];
1890 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001891 retire_early |= RecordSubmitSignalSemaphore(submission, queue, submit->pSignalSemaphores[i], value, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001892 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001893 if (retire_early) {
1894 early_retire_seq = std::max(early_retire_seq, next_seq);
1895 }
1896
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07001897 const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001898 submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001899
locke-lunargd556cc32019-09-17 01:21:23 -06001900 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001901 RecordSubmitCommandBuffer(submission, submit->pCommandBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06001902 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001903 submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
1904 queue_state->submissions.emplace_back(std::move(submission));
1905 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001906
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001907 if (early_retire_seq) {
1908 RetireWorkOnQueue(queue_state, early_retire_seq);
1909 }
1910}
1911
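// Synchronization2 variant of PostCallRecordQueueSubmit: semaphore values come directly from
// the VkSemaphoreSubmitInfoKHR entries rather than a VkTimelineSemaphoreSubmitInfo pNext chain.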
1912void ValidationStateTracker::PostCallRecordQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
1913 VkFence fence, VkResult result) {
1914 if (result != VK_SUCCESS) return;
1915 auto queue_state = GetQueueState(queue);
1916
1917 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, submitCount);
1918
1919 // Now process each individual submit
1920 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1921 CB_SUBMISSION submission;
1922 const VkSubmitInfo2KHR *submit = &pSubmits[submit_idx];
1923 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
1924 for (uint32_t i = 0; i < submit->waitSemaphoreInfoCount; ++i) {
1925 const auto &sem_info = submit->pWaitSemaphoreInfos[i];
1926 RecordSubmitWaitSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
1927 }
1928 bool retire_early = false;
1929 for (uint32_t i = 0; i < submit->signalSemaphoreInfoCount; ++i) {
1930 const auto &sem_info = submit->pSignalSemaphoreInfos[i];
1931 retire_early |= RecordSubmitSignalSemaphore(submission, queue, sem_info.semaphore, sem_info.value, next_seq);
1932 }
1933 if (retire_early) {
1934 early_retire_seq = std::max(early_retire_seq, next_seq);
1935 }
 1936        const auto perf_submit = LvlFindInChain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
1937 submission.perf_submit_pass = perf_submit ? perf_submit->counterPassIndex : 0;
1938
1939 for (uint32_t i = 0; i < submit->commandBufferInfoCount; i++) {
1940 RecordSubmitCommandBuffer(submission, submit->pCommandBufferInfos[i].commandBuffer);
1941 }
1942 submission.fence = submit_idx == (submitCount - 1) ? fence : VK_NULL_HANDLE;
1943 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06001944 }
1945
1946 if (early_retire_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001947 RetireWorkOnQueue(queue_state, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001948 }
1949}
1950
1951void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
1952 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
1953 VkResult result) {
1954 if (VK_SUCCESS == result) {
1955 AddMemObjInfo(device, *pMemory, pAllocateInfo);
1956 }
1957 return;
1958}
1959
1960void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
1961 if (!mem) return;
1962 DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
locke-lunargd556cc32019-09-17 01:21:23 -06001963 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001964 mem_info->Destroy();
John Zulauf79952712020-04-07 11:25:54 -06001965 fake_memory.Free(mem_info->fake_base_address);
locke-lunargd556cc32019-09-17 01:21:23 -06001966 memObjMap.erase(mem);
1967}
1968
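// Track sparse memory bindings for buffers and images, then record each VkBindSparseInfo as a
// submission (wait/signal semaphores plus the fence on the last bind) so it retires like a
// normal queue submit.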
1969void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
1970 VkFence fence, VkResult result) {
1971 if (result != VK_SUCCESS) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001972 auto queue_state = GetQueueState(queue);
locke-lunargd556cc32019-09-17 01:21:23 -06001973
Jeremy Gebben74aa7622020-12-15 11:18:00 -07001974 uint64_t early_retire_seq = RecordSubmitFence(queue_state, fence, bindInfoCount);
locke-lunargd556cc32019-09-17 01:21:23 -06001975
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001976 for (uint32_t bind_idx = 0; bind_idx < bindInfoCount; ++bind_idx) {
1977 const VkBindSparseInfo &bind_info = pBindInfo[bind_idx];
locke-lunargd556cc32019-09-17 01:21:23 -06001978 // Track objects tied to memory
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001979 for (uint32_t j = 0; j < bind_info.bufferBindCount; j++) {
1980 for (uint32_t k = 0; k < bind_info.pBufferBinds[j].bindCount; k++) {
1981 auto sparse_binding = bind_info.pBufferBinds[j].pBinds[k];
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001982 auto buffer_state = GetBufferState(bind_info.pBufferBinds[j].buffer);
1983 auto mem_state = GetDevMemShared(sparse_binding.memory);
1984 if (buffer_state && mem_state) {
1985 buffer_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
1986 }
locke-lunargd556cc32019-09-17 01:21:23 -06001987 }
1988 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001989 for (uint32_t j = 0; j < bind_info.imageOpaqueBindCount; j++) {
1990 for (uint32_t k = 0; k < bind_info.pImageOpaqueBinds[j].bindCount; k++) {
1991 auto sparse_binding = bind_info.pImageOpaqueBinds[j].pBinds[k];
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06001992 auto image_state = GetImageState(bind_info.pImageOpaqueBinds[j].image);
1993 auto mem_state = GetDevMemShared(sparse_binding.memory);
1994 if (image_state && mem_state) {
1995 image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, sparse_binding.size);
1996 }
locke-lunargd556cc32019-09-17 01:21:23 -06001997 }
1998 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001999 for (uint32_t j = 0; j < bind_info.imageBindCount; j++) {
2000 for (uint32_t k = 0; k < bind_info.pImageBinds[j].bindCount; k++) {
2001 auto sparse_binding = bind_info.pImageBinds[j].pBinds[k];
locke-lunargd556cc32019-09-17 01:21:23 -06002002 // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
2003 VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002004 auto image_state = GetImageState(bind_info.pImageBinds[j].image);
2005 auto mem_state = GetDevMemShared(sparse_binding.memory);
2006 if (image_state && mem_state) {
2007 image_state->SetSparseMemBinding(mem_state, sparse_binding.memoryOffset, size);
2008 }
locke-lunargd556cc32019-09-17 01:21:23 -06002009 }
2010 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002011 CB_SUBMISSION submission;
2012 const uint64_t next_seq = queue_state->seq + queue_state->submissions.size() + 1;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002013 for (uint32_t i = 0; i < bind_info.waitSemaphoreCount; ++i) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002014 RecordSubmitWaitSemaphore(submission, queue, bind_info.pWaitSemaphores[i], 0, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06002015 }
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002016 bool retire_early = false;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002017 for (uint32_t i = 0; i < bind_info.signalSemaphoreCount; ++i) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002018 retire_early |= RecordSubmitSignalSemaphore(submission, queue, bind_info.pSignalSemaphores[i], 0, next_seq);
2019 }
 2020        // Retire work up until this submit early; we will not see the wait that corresponds to this signal
2021 if (retire_early) {
2022 early_retire_seq = std::max(early_retire_seq, queue_state->seq + queue_state->submissions.size() + 1);
locke-lunargd556cc32019-09-17 01:21:23 -06002023 }
2024
Jeremy Gebben74aa7622020-12-15 11:18:00 -07002025 submission.fence = bind_idx == (bindInfoCount - 1) ? fence : VK_NULL_HANDLE;
2026 queue_state->submissions.emplace_back(std::move(submission));
locke-lunargd556cc32019-09-17 01:21:23 -06002027 }
2028
2029 if (early_retire_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002030 RetireWorkOnQueue(queue_state, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06002031 }
2032}
2033
2034void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
2035 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
2036 VkResult result) {
2037 if (VK_SUCCESS != result) return;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002038 semaphoreMap[*pSemaphore] = std::make_shared<SEMAPHORE_STATE>(*pSemaphore, LvlFindInChain<VkSemaphoreTypeCreateInfo>(pCreateInfo->pNext));
locke-lunargd556cc32019-09-17 01:21:23 -06002039}
2040
Mike Schuchardt2df08912020-12-15 16:28:09 -08002041void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBits handle_type,
2042 VkSemaphoreImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06002043 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
2044 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08002045 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06002046 sema_node->scope == kSyncScopeInternal) {
2047 sema_node->scope = kSyncScopeExternalTemporary;
2048 } else {
2049 sema_node->scope = kSyncScopeExternalPermanent;
2050 }
2051 }
2052}
2053
Mike Schuchardt2df08912020-12-15 16:28:09 -08002054void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo,
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002055 VkResult result) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002056    auto *semaphore_state = GetSemaphoreState(pSignalInfo->semaphore);
 2057    if (semaphore_state) semaphore_state->payload = pSignalInfo->value;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002058}
2059
locke-lunargd556cc32019-09-17 01:21:23 -06002060void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
2061 auto mem_info = GetDevMemState(mem);
2062 if (mem_info) {
2063 mem_info->mapped_range.offset = offset;
2064 mem_info->mapped_range.size = size;
2065 mem_info->p_driver_data = *ppData;
2066 }
2067}
2068
2069void ValidationStateTracker::RetireFence(VkFence fence) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002070 auto fence_state = GetFenceState(fence);
2071 if (fence_state && fence_state->scope == kSyncScopeInternal) {
2072 if (fence_state->signaler.first != VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06002073 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002074 RetireWorkOnQueue(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002075 } else {
2076 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
2077 // the fence as retired.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002078 fence_state->state = FENCE_RETIRED;
locke-lunargd556cc32019-09-17 01:21:23 -06002079 }
2080 }
2081}
2082
2083void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2084 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2085 if (VK_SUCCESS != result) return;
2086
2087 // When we know that all fences are complete we can clean/remove their CBs
2088 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2089 for (uint32_t i = 0; i < fenceCount; i++) {
2090 RetireFence(pFences[i]);
2091 }
2092 }
 2093    // NOTE: The alternate case, not handled here, is when only some of the fences have completed.
 2094    // In that case, for the app to determine which fences completed, it will have to call
 2095    // vkGetFenceStatus(), at which point we'll clean/remove their CBs if complete.
2096}
2097
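// Retire work on every queue up to the latest submission that signals this timeline semaphore
// with a payload <= until_payload.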
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002098void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002099 auto semaphore_state = GetSemaphoreState(semaphore);
2100 if (semaphore_state) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002101 for (auto &pair : queueMap) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002102 QUEUE_STATE &queue_state = pair.second;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002103 uint64_t max_seq = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002104 for (const auto &submission : queue_state.submissions) {
2105 for (const auto &signal_semaphore : submission.signalSemaphores) {
2106 if (signal_semaphore.semaphore == semaphore && signal_semaphore.payload <= until_payload) {
2107 if (signal_semaphore.seq > max_seq) {
2108 max_seq = signal_semaphore.seq;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002109 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002110 }
2111 }
2112 }
Tony-LunarG47d5e272020-04-07 15:35:55 -06002113 if (max_seq) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002114 RetireWorkOnQueue(&queue_state, max_seq);
Tony-LunarG47d5e272020-04-07 15:35:55 -06002115 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002116 }
2117 }
2118}
2119
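// Shared implementation for vkWaitSemaphores / vkWaitSemaphoresKHR: on success, retire timeline
// work up to each waited payload value.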
John Zulauff89de662020-04-13 18:57:34 -06002120void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2121 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002122 if (VK_SUCCESS != result) return;
2123
2124 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2125 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2126 }
2127}
2128
John Zulauff89de662020-04-13 18:57:34 -06002129void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2130 VkResult result) {
2131 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2132}
2133
2134void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
2135 uint64_t timeout, VkResult result) {
2136 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2137}
2138
Adrian Coca Lorentec7d76102020-09-28 13:58:16 +02002139void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2140 VkResult result) {
2141 if (VK_SUCCESS != result) return;
2142
2143 RetireTimelineSemaphore(semaphore, *pValue);
2144}
2145
2146void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2147 VkResult result) {
2148 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2149}
2150void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2151 VkResult result) {
2152 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2153}
2154
locke-lunargd556cc32019-09-17 01:21:23 -06002155void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2156 if (VK_SUCCESS != result) return;
2157 RetireFence(fence);
2158}
2159
2160void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
Jeremy Gebben5573a8c2021-06-04 08:55:10 -06002161 queueMap.emplace(queue, QUEUE_STATE(queue, queue_family_index));
locke-lunargd556cc32019-09-17 01:21:23 -06002162}
2163
2164void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
2165 VkQueue *pQueue) {
2166 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
2167}
2168
2169void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
2170 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
2171}
2172
2173void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2174 if (VK_SUCCESS != result) return;
2175 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002176 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002177}
2178
2179void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2180 if (VK_SUCCESS != result) return;
2181 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002182 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002183 }
2184}
2185
2186void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2187 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002188 auto fence_state = GetFenceState(fence);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002189 fence_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002190 fenceMap.erase(fence);
2191}
2192
2193void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2194 const VkAllocationCallbacks *pAllocator) {
2195 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002196 auto semaphore_state = GetSemaphoreState(semaphore);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002197 semaphore_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002198 semaphoreMap.erase(semaphore);
2199}
2200
2201void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2202 if (!event) return;
John Zulauf48057322020-12-02 11:59:31 -07002203 EVENT_STATE *event_state = Get<EVENT_STATE>(event);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002204 event_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002205 eventMap.erase(event);
2206}
2207
2208void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2209 const VkAllocationCallbacks *pAllocator) {
2210 if (!queryPool) return;
2211 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002212 qp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002213 queryPoolMap.erase(queryPool);
2214}
2215
locke-lunargd556cc32019-09-17 01:21:23 -06002216void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2217 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2218 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002219 // Track objects tied to memory
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002220 auto mem_state = GetDevMemShared(mem);
2221 if (mem_state) {
2222 buffer_state->SetMemBinding(mem_state, memoryOffset);
2223 }
locke-lunargd556cc32019-09-17 01:21:23 -06002224 }
2225}
2226
2227void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
2228 VkDeviceSize memoryOffset, VkResult result) {
2229 if (VK_SUCCESS != result) return;
2230 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
2231}
2232
2233void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002234 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002235 for (uint32_t i = 0; i < bindInfoCount; i++) {
2236 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2237 }
2238}
2239
2240void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002241 const VkBindBufferMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06002242 for (uint32_t i = 0; i < bindInfoCount; i++) {
2243 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2244 }
2245}
2246
Spencer Fricke6c127102020-04-16 06:25:20 -07002247void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002248 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2249 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002250 buffer_state->memory_requirements_checked = true;
2251 }
2252}
2253
2254void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
2255 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002256 RecordGetBufferMemoryRequirementsState(buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002257}
2258
2259void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002260 const VkBufferMemoryRequirementsInfo2 *pInfo,
2261 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002262 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002263}
2264
2265void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08002266 const VkBufferMemoryRequirementsInfo2 *pInfo,
2267 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002268 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002269}
2270
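// Mark an image's memory requirements as queried. For multi-planar queries only the requested
// plane's flag is set, and the whole-image flag is cleared because each plane must be queried
// individually.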
Spencer Fricke6c127102020-04-16 06:25:20 -07002271void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002272 const VkImagePlaneMemoryRequirementsInfo *plane_info =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002273 (pInfo == nullptr) ? nullptr : LvlFindInChain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002274 IMAGE_STATE *image_state = GetImageState(image);
2275 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002276 if (plane_info != nullptr) {
2277 // Multi-plane image
2278 image_state->memory_requirements_checked = false; // Each image plane needs to be checked itself
2279 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
2280 image_state->plane0_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002281 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
2282 image_state->plane1_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002283 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
2284 image_state->plane2_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002285 }
2286 } else {
2287 // Single Plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002288 image_state->memory_requirements_checked = true;
2289 }
locke-lunargd556cc32019-09-17 01:21:23 -06002290 }
2291}
2292
2293void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
2294 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002295 RecordGetImageMemoryRequirementsState(image, nullptr);
locke-lunargd556cc32019-09-17 01:21:23 -06002296}
2297
2298void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2299 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002300 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002301}
2302
2303void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
2304 const VkImageMemoryRequirementsInfo2 *pInfo,
2305 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002306 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002307}
2308
2309static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2310 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2311 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2312 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2313 image_state->sparse_metadata_required = true;
2314 }
2315}
2316
2317void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2318 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2319 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2320 auto image_state = GetImageState(image);
2321 image_state->get_sparse_reqs_called = true;
2322 if (!pSparseMemoryRequirements) return;
2323 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2324 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2325 }
2326}
2327
2328void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002329 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2330 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002331 auto image_state = GetImageState(pInfo->image);
2332 image_state->get_sparse_reqs_called = true;
2333 if (!pSparseMemoryRequirements) return;
2334 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2335 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2336 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2337 }
2338}
2339
2340void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08002341 VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
2342 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
locke-lunargd556cc32019-09-17 01:21:23 -06002343 auto image_state = GetImageState(pInfo->image);
2344 image_state->get_sparse_reqs_called = true;
2345 if (!pSparseMemoryRequirements) return;
2346 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2347 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2348 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2349 }
2350}
2351
2352void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2353 const VkAllocationCallbacks *pAllocator) {
2354 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002355 auto shader_module_state = GetShaderModuleState(shaderModule);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002356 shader_module_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002357 shaderModuleMap.erase(shaderModule);
2358}
2359
2360void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2361 const VkAllocationCallbacks *pAllocator) {
2362 if (!pipeline) return;
2363 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
locke-lunargd556cc32019-09-17 01:21:23 -06002364 // Any bound cmd buffers are now invalid
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002365 pipeline_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002366 pipelineMap.erase(pipeline);
2367}
2368
2369void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2370 const VkAllocationCallbacks *pAllocator) {
2371 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002372 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002373 pipeline_layout_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002374 pipelineLayoutMap.erase(pipelineLayout);
2375}
2376
2377void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2378 const VkAllocationCallbacks *pAllocator) {
2379 if (!sampler) return;
2380 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
locke-lunargd556cc32019-09-17 01:21:23 -06002381 // Any bound cmd buffers are now invalid
2382 if (sampler_state) {
Yuly Novikov424cdd52020-05-26 16:45:12 -04002383 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2384 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
2385 custom_border_color_sampler_count--;
2386 }
2387
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002388 sampler_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002389 }
2390 samplerMap.erase(sampler);
2391}
2392
2393void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2394 const VkAllocationCallbacks *pAllocator) {
2395 if (!descriptorSetLayout) return;
2396 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2397 if (layout_it != descriptorSetLayoutMap.end()) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002398 layout_it->second.get()->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002399 descriptorSetLayoutMap.erase(layout_it);
2400 }
2401}
2402
2403void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2404 const VkAllocationCallbacks *pAllocator) {
2405 if (!descriptorPool) return;
2406 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002407 if (desc_pool_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002408 // Free sets that were in this pool
John Zulauf79f06582021-02-27 18:38:39 -07002409 for (auto *ds : desc_pool_state->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002410 FreeDescriptorSet(ds);
2411 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002412 desc_pool_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002413 descriptorPoolMap.erase(descriptorPool);
2414 }
2415}
2416
 2417// Free all command buffers in the given list, removing all references/links to them and destroying their state
2418void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
2419 const VkCommandBuffer *command_buffers) {
2420 for (uint32_t i = 0; i < command_buffer_count; i++) {
John Zulaufd1f85d42020-04-15 12:23:15 -06002421 // Allow any derived class to clean up command buffer state
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002422 if (command_buffer_reset_callback) {
2423 (*command_buffer_reset_callback)(command_buffers[i]);
2424 }
John Zulaufd1f85d42020-04-15 12:23:15 -06002425 if (command_buffer_free_callback) {
2426 (*command_buffer_free_callback)(command_buffers[i]);
2427 }
2428
locke-lunargd556cc32019-09-17 01:21:23 -06002429 auto cb_state = GetCBState(command_buffers[i]);
2430 // Remove references to command buffer's state and delete
2431 if (cb_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002432 // Remove the cb_state's references from COMMAND_POOL_STATEs
2433 pool_state->commandBuffers.erase(command_buffers[i]);
2434 // Remove the cb debug labels
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06002435 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer());
locke-lunargd556cc32019-09-17 01:21:23 -06002436 // Remove CBState from CB map
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002437 cb_state->Destroy();
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06002438 commandBufferMap.erase(cb_state->commandBuffer());
locke-lunargd556cc32019-09-17 01:21:23 -06002439 }
2440 }
2441}
2442
2443void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2444 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002445 auto pool = GetCommandPoolState(commandPool);
2446 FreeCommandBufferStates(pool, commandBufferCount, pCommandBuffers);
locke-lunargd556cc32019-09-17 01:21:23 -06002447}
2448
2449void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2450 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2451 VkResult result) {
2452 if (VK_SUCCESS != result) return;
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06002453 auto queue_flags = GetPhysicalDeviceState()->queue_family_properties[pCreateInfo->queueFamilyIndex].queueFlags;
2454 commandPoolMap[*pCommandPool] = std::make_shared<COMMAND_POOL_STATE>(*pCommandPool, pCreateInfo, queue_flags);
locke-lunargd556cc32019-09-17 01:21:23 -06002455}
2456
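// For performance query pools, capture the counter index count, counter scopes (command buffer
// and render pass), and the number of passes required before creating the pool state; every
// query in the pool starts as QUERYSTATE_UNKNOWN.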
2457void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2458 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2459 VkResult result) {
2460 if (VK_SUCCESS != result) return;
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002461
2462 uint32_t index_count = 0, n_perf_pass = 0;
2463 bool has_cb = false, has_rb = false;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002464 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002465 const auto *perf = LvlFindInChain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002466 index_count = perf->counterIndexCount;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002467
Mark Lobodzinski7e948e42020-09-09 10:23:36 -06002468 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002469 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2470 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2471 switch (counter.scope) {
2472 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002473 has_cb = true;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002474 break;
2475 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002476 has_rb = true;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002477 break;
2478 default:
2479 break;
2480 }
2481 }
2482
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002483 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf, &n_perf_pass);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002484 }
2485
Jeremy Gebbene9206ee2021-06-02 12:44:41 -06002486 queryPoolMap[*pQueryPool] =
2487 std::make_shared<QUERY_POOL_STATE>(*pQueryPool, pCreateInfo, index_count, n_perf_pass, has_cb, has_rb);
locke-lunargd556cc32019-09-17 01:21:23 -06002488
2489 QueryObject query_obj{*pQueryPool, 0u};
2490 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2491 query_obj.query = i;
2492 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2493 }
2494}
2495
2496void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2497 const VkAllocationCallbacks *pAllocator) {
2498 if (!commandPool) return;
2499 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2500 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2501 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2502 if (cp_state) {
2503 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2504 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2505 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002506 cp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002507 commandPoolMap.erase(commandPool);
2508 }
2509}
2510
2511void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2512 VkCommandPoolResetFlags flags, VkResult result) {
2513 if (VK_SUCCESS != result) return;
2514 // Reset all of the CBs allocated from this pool
2515 auto command_pool_state = GetCommandPoolState(commandPool);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002516 for (auto cmd_buffer : command_pool_state->commandBuffers) {
2517 ResetCommandBufferState(cmd_buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002518 }
2519}
2520
2521void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2522 VkResult result) {
2523 for (uint32_t i = 0; i < fenceCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002524 auto fence_state = GetFenceState(pFences[i]);
2525 if (fence_state) {
2526 if (fence_state->scope == kSyncScopeInternal) {
2527 fence_state->state = FENCE_UNSIGNALED;
2528 } else if (fence_state->scope == kSyncScopeExternalTemporary) {
2529 fence_state->scope = kSyncScopeInternal;
locke-lunargd556cc32019-09-17 01:21:23 -06002530 }
2531 }
2532 }
2533}
2534
locke-lunargd556cc32019-09-17 01:21:23 -06002535void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2536 const VkAllocationCallbacks *pAllocator) {
2537 if (!framebuffer) return;
2538 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002539 framebuffer_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002540 frameBufferMap.erase(framebuffer);
2541}
2542
2543void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2544 const VkAllocationCallbacks *pAllocator) {
2545 if (!renderPass) return;
2546 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002547 rp_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06002548 renderPassMap.erase(renderPass);
2549}
2550
2551void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2552 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2553 if (VK_SUCCESS != result) return;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002554 fenceMap[*pFence] = std::make_shared<FENCE_STATE>(*pFence, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002555}
2556
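// Graphics pipeline creation is split across the chassis: PreCallValidate builds the
// PIPELINE_STATE objects in the chassis-provided create_graphics_pipeline_api_state, and
// PostCallRecord commits them to pipelineMap for each non-null handle returned.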
2557bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2558 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2559 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002560 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002561 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2562 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2563 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2564 cgpl_state->pipe_state.reserve(count);
2565 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002566 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002567 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002568 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002569 }
2570 return false;
2571}
2572
2573void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2574 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2575 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2576 VkResult result, void *cgpl_state_data) {
2577 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2578 // This API may create pipelines regardless of the return value
2579 for (uint32_t i = 0; i < count; i++) {
2580 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002581 (cgpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002582 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2583 }
2584 }
2585 cgpl_state->pipe_state.clear();
2586}
2587
2588bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2589 const VkComputePipelineCreateInfo *pCreateInfos,
2590 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002591 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002592 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2593 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2594 ccpl_state->pipe_state.reserve(count);
2595 for (uint32_t i = 0; i < count; i++) {
2596 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002597 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002598 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002599 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002600 }
2601 return false;
2602}
2603
2604void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2605 const VkComputePipelineCreateInfo *pCreateInfos,
2606 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2607 VkResult result, void *ccpl_state_data) {
2608 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2609
2610 // This API may create pipelines regardless of the return value
2611 for (uint32_t i = 0; i < count; i++) {
2612 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002613 (ccpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002614 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2615 }
2616 }
2617 ccpl_state->pipe_state.clear();
2618}
2619
2620bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2621 uint32_t count,
2622 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2623 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002624 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002625 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2626 crtpl_state->pipe_state.reserve(count);
2627 for (uint32_t i = 0; i < count; i++) {
2628 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002629 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002630 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002631 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002632 }
2633 return false;
2634}
2635
2636void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2637 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2638 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2639 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2640 // This API may create pipelines regardless of the return value
2641 for (uint32_t i = 0; i < count; i++) {
2642 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002643 (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002644 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2645 }
2646 }
2647 crtpl_state->pipe_state.clear();
2648}
2649
sourav parmarcd5fb182020-07-17 12:58:44 -07002650bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
2651 VkPipelineCache pipelineCache, uint32_t count,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002652 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2653 const VkAllocationCallbacks *pAllocator,
2654 VkPipeline *pPipelines, void *crtpl_state_data) const {
2655 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2656 crtpl_state->pipe_state.reserve(count);
2657 for (uint32_t i = 0; i < count; i++) {
2658 // Create and initialize internal tracking data structure
2659 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
2660 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
2661 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
2662 }
2663 return false;
2664}
2665
sourav parmarcd5fb182020-07-17 12:58:44 -07002666void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
2667 VkPipelineCache pipelineCache, uint32_t count,
2668 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2669 const VkAllocationCallbacks *pAllocator,
2670 VkPipeline *pPipelines, VkResult result,
2671 void *crtpl_state_data) {
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002672 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2673 // This API may create pipelines regardless of the return value
2674 for (uint32_t i = 0; i < count; i++) {
2675 if (pPipelines[i] != VK_NULL_HANDLE) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002676 (crtpl_state->pipe_state)[i]->SetHandle(pPipelines[i]);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002677 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2678 }
2679 }
2680 crtpl_state->pipe_state.clear();
2681}
2682
locke-lunargd556cc32019-09-17 01:21:23 -06002683void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2684 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2685 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002686 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002687 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2688 pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
Tony-LunarG7337b312020-04-15 16:40:25 -06002689 custom_border_color_sampler_count++;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002690 }
locke-lunargd556cc32019-09-17 01:21:23 -06002691}
2692
2693void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2694 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2695 const VkAllocationCallbacks *pAllocator,
2696 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2697 if (VK_SUCCESS != result) return;
2698 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2699}
2700
2701// For repeatable sorting, not very useful for "memory in range" search
2702struct PushConstantRangeCompare {
2703 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2704 if (lhs->offset == rhs->offset) {
2705 if (lhs->size == rhs->size) {
2706 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2707 return lhs->stageFlags < rhs->stageFlags;
2708 }
2709 // If the offsets are the same then sorting by the end of range is useful for validation
2710 return lhs->size < rhs->size;
2711 }
2712 return lhs->offset < rhs->offset;
2713 }
2714};
2715
2716static PushConstantRangesDict push_constant_ranges_dict;
2717
2718PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2719 if (!info->pPushConstantRanges) {
2720 // Hand back the empty entry (creating as needed)...
2721 return push_constant_ranges_dict.look_up(PushConstantRanges());
2722 }
2723
2724 // Sort the input ranges to ensure equivalent ranges map to the same id
2725 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2726 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2727 sorted.insert(info->pPushConstantRanges + i);
2728 }
2729
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07002730 PushConstantRanges ranges;
2731 ranges.reserve(sorted.size());
John Zulauf79f06582021-02-27 18:38:39 -07002732 for (const auto *range : sorted) {
locke-lunargd556cc32019-09-17 01:21:23 -06002733 ranges.emplace_back(*range);
2734 }
2735 return push_constant_ranges_dict.look_up(std::move(ranges));
2736}
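
// A minimal usage sketch of the canonicalization above (the create infos and ranges are
// hypothetical, not taken from this file): two layouts that declare the same push constant
// ranges in a different order sort to the same canonical id, so later compatibility checks can
// compare ids instead of range contents. This assumes PushConstantRangesId is equality-comparable,
// which is how it is consumed by the "compatible for set" records below.
//
//     VkPushConstantRange ranges_a[2] = {{VK_SHADER_STAGE_VERTEX_BIT, 0, 16},
//                                        {VK_SHADER_STAGE_FRAGMENT_BIT, 16, 32}};
//     VkPushConstantRange ranges_b[2] = {{VK_SHADER_STAGE_FRAGMENT_BIT, 16, 32},
//                                        {VK_SHADER_STAGE_VERTEX_BIT, 0, 16}};
//     auto ci_a = LvlInitStruct<VkPipelineLayoutCreateInfo>();
//     ci_a.pushConstantRangeCount = 2;
//     ci_a.pPushConstantRanges = ranges_a;
//     auto ci_b = ci_a;
//     ci_b.pPushConstantRanges = ranges_b;
//     assert(GetCanonicalId(&ci_a) == GetCanonicalId(&ci_b));  // same id despite different declaration order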
2737
2738// Dictionary of the canonical form of the pipeline set layout of descriptor set layouts
2739static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2740
2741// Dictionary of canonical form of the "compatible for set" records
2742static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2743
2744static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2745 const PipelineLayoutSetLayoutsId set_layouts_id) {
2746 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2747}
2748
2749void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2750 const VkAllocationCallbacks *pAllocator,
2751 VkPipelineLayout *pPipelineLayout, VkResult result) {
2752 if (VK_SUCCESS != result) return;
2753
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002754 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>(*pPipelineLayout);
locke-lunargd556cc32019-09-17 01:21:23 -06002755 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2756 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2757 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002758 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002759 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2760 }
2761
2762 // Get canonical form IDs for the "compatible for set" contents
2763 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2764 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2765 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2766
2767    // Create table of "compatible for set N" canonical forms for trivial accept validation
2768 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2769 pipeline_layout_state->compat_for_set.emplace_back(
2770 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2771 }
2772 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2773}
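
// A sketch of how the canonical ids recorded above are meant to be consumed (the helper name and
// the two layout objects are hypothetical): compat_for_set[n] canonicalizes the push constant
// ranges together with the set layouts up to and including set n, so "compatible for set n"
// between two pipeline layouts collapses to a single id comparison rather than a deep compare.
//
//     bool CompatibleForSetN(const PIPELINE_LAYOUT_STATE &a, const PIPELINE_LAYOUT_STATE &b, uint32_t n) {
//         return (n < a.compat_for_set.size()) && (n < b.compat_for_set.size()) &&
//                (a.compat_for_set[n] == b.compat_for_set[n]);
//     }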
2774
2775void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2776 const VkAllocationCallbacks *pAllocator,
2777 VkDescriptorPool *pDescriptorPool, VkResult result) {
2778 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002779 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002780}
2781
2782void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2783 VkDescriptorPoolResetFlags flags, VkResult result) {
2784 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002785 DESCRIPTOR_POOL_STATE *pool = GetDescriptorPoolState(descriptorPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002786 // TODO: validate flags
2787    // For every set allocated from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
John Zulauf79f06582021-02-27 18:38:39 -07002788 for (auto *ds : pool->sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002789 FreeDescriptorSet(ds);
2790 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002791 pool->sets.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06002792 // Reset available count for each type and available sets for this pool
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002793 for (auto it = pool->availableDescriptorTypeCount.begin(); it != pool->availableDescriptorTypeCount.end(); ++it) {
2794 pool->availableDescriptorTypeCount[it->first] = pool->maxDescriptorTypeCount[it->first];
locke-lunargd556cc32019-09-17 01:21:23 -06002795 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002796 pool->availableSets = pool->maxSets;
locke-lunargd556cc32019-09-17 01:21:23 -06002797}
2798
2799bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2800 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002801 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002802 // Always update common data
2803 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2804 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2805 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2806
2807 return false;
2808}
2809
2810// Allocation state was good and the call down the chain was made, so update state based on the allocated descriptor sets
2811void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2812 VkDescriptorSet *pDescriptorSets, VkResult result,
2813 void *ads_state_data) {
2814 if (VK_SUCCESS != result) return;
2815 // All the updates are contained in a single cvdescriptorset function
2816 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2817 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2818 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2819}
2820
2821void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2822 const VkDescriptorSet *pDescriptorSets) {
2823 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2824 // Update available descriptor sets in pool
2825 pool_state->availableSets += count;
2826
2827    // For each freed descriptor set, add its resources back into the pool as available and remove it from the pool and setMap
2828 for (uint32_t i = 0; i < count; ++i) {
2829 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2830 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2831 uint32_t type_index = 0, descriptor_count = 0;
2832 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2833 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2834 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2835 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2836 }
2837 FreeDescriptorSet(descriptor_set);
2838 pool_state->sets.erase(descriptor_set);
2839 }
2840 }
2841}
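
// A small worked example of the accounting above (all numbers are hypothetical): for a pool
// created with maxSets = 4 and 8 descriptors of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, freeing two
// sets that each hold 2 uniform-buffer descriptors restores the bookkeeping as follows:
//
//     availableSets                                                    2 -> 4
//     availableDescriptorTypeCount[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER]  4 -> 8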
2842
2843void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2844 const VkWriteDescriptorSet *pDescriptorWrites,
2845 uint32_t descriptorCopyCount,
2846 const VkCopyDescriptorSet *pDescriptorCopies) {
2847 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2848 pDescriptorCopies);
2849}
2850
2851void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2852 VkCommandBuffer *pCommandBuffer, VkResult result) {
2853 if (VK_SUCCESS != result) return;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002854 auto pool = GetCommandPoolShared(pCreateInfo->commandPool);
2855 if (pool) {
locke-lunargd556cc32019-09-17 01:21:23 -06002856 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2857 // Add command buffer to its commandPool map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002858 pool->commandBuffers.insert(pCommandBuffer[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06002859 auto cb_state = std::make_shared<CMD_BUFFER_STATE>(pCommandBuffer[i], pCreateInfo);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002860 cb_state->command_pool = pool;
2861 cb_state->unprotected = pool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06002862 // Add command buffer to map
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07002863 commandBufferMap[pCommandBuffer[i]] = std::move(cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002864 ResetCommandBufferState(pCommandBuffer[i]);
2865 }
2866 }
2867}
2868
locke-lunargfc78e932020-11-19 17:06:24 -07002869void UpdateSubpassAttachments(const safe_VkSubpassDescription2 &subpass, std::vector<SUBPASS_INFO> &subpasses) {
2870 for (uint32_t index = 0; index < subpass.inputAttachmentCount; ++index) {
2871 const uint32_t attachment_index = subpass.pInputAttachments[index].attachment;
2872 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2873 subpasses[attachment_index].used = true;
2874 subpasses[attachment_index].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
2875 subpasses[attachment_index].layout = subpass.pInputAttachments[index].layout;
2876 }
2877 }
2878
2879 for (uint32_t index = 0; index < subpass.colorAttachmentCount; ++index) {
2880 const uint32_t attachment_index = subpass.pColorAttachments[index].attachment;
2881 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2882 subpasses[attachment_index].used = true;
2883 subpasses[attachment_index].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2884 subpasses[attachment_index].layout = subpass.pColorAttachments[index].layout;
2885 }
2886 if (subpass.pResolveAttachments) {
2887 const uint32_t attachment_index2 = subpass.pResolveAttachments[index].attachment;
2888 if (attachment_index2 != VK_ATTACHMENT_UNUSED) {
2889 subpasses[attachment_index2].used = true;
2890 subpasses[attachment_index2].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2891 subpasses[attachment_index2].layout = subpass.pResolveAttachments[index].layout;
2892 }
2893 }
2894 }
2895
2896 if (subpass.pDepthStencilAttachment) {
2897 const uint32_t attachment_index = subpass.pDepthStencilAttachment->attachment;
2898 if (attachment_index != VK_ATTACHMENT_UNUSED) {
2899 subpasses[attachment_index].used = true;
2900 subpasses[attachment_index].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
2901 subpasses[attachment_index].layout = subpass.pDepthStencilAttachment->layout;
2902 }
2903 }
2904}
2905
2906void UpdateAttachmentsView(ValidationStateTracker &tracker, CMD_BUFFER_STATE &cb_state, const FRAMEBUFFER_STATE &framebuffer,
2907 const VkRenderPassBeginInfo *pRenderPassBegin) {
2908 auto &attachments = *(cb_state.active_attachments.get());
2909 const bool imageless = (framebuffer.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) ? true : false;
2910 const VkRenderPassAttachmentBeginInfo *attachment_info_struct = nullptr;
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002911 if (pRenderPassBegin) attachment_info_struct = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
locke-lunargfc78e932020-11-19 17:06:24 -07002912
2913 for (uint32_t i = 0; i < attachments.size(); ++i) {
2914 if (imageless) {
2915 if (attachment_info_struct && i < attachment_info_struct->attachmentCount) {
2916 auto res = cb_state.attachments_view_states.insert(
2917 tracker.GetShared<IMAGE_VIEW_STATE>(attachment_info_struct->pAttachments[i]));
2918 attachments[i] = res.first->get();
2919 }
2920 } else {
2921 auto res = cb_state.attachments_view_states.insert(framebuffer.attachments_view_state[i]);
2922 attachments[i] = res.first->get();
2923 }
2924 }
2925}
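
// A minimal sketch of the imageless path handled above (the handle and counts are hypothetical):
// with VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT the attachment views are not baked into the
// framebuffer, so they arrive chained to the begin info and are looked up per attachment here.
//
//     VkImageView view = /* hypothetical view matching the framebuffer's attachment image info */;
//     auto attach_begin = LvlInitStruct<VkRenderPassAttachmentBeginInfo>();
//     attach_begin.attachmentCount = 1;
//     attach_begin.pAttachments = &view;
//     auto rp_begin = LvlInitStruct<VkRenderPassBeginInfo>();
//     rp_begin.pNext = &attach_begin;  // found via LvlFindInChain<VkRenderPassAttachmentBeginInfo>
//     // ... fill renderPass/framebuffer/renderArea/clear values, then record vkCmdBeginRenderPass(cb, &rp_begin, ...)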
2926
locke-lunargd556cc32019-09-17 01:21:23 -06002927void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
2928 const VkCommandBufferBeginInfo *pBeginInfo) {
2929 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2930 if (!cb_state) return;
locke-lunargfc78e932020-11-19 17:06:24 -07002931
locke-lunargd556cc32019-09-17 01:21:23 -06002932 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
2933 ResetCommandBufferState(commandBuffer);
2934 }
2935 // Set updated state here in case implicit reset occurs above
2936 cb_state->state = CB_RECORDING;
2937 cb_state->beginInfo = *pBeginInfo;
Tony-LunarG3c287f62020-12-17 12:39:49 -07002938 if (cb_state->beginInfo.pInheritanceInfo && (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
locke-lunargd556cc32019-09-17 01:21:23 -06002939 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
2940 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
2941        // If we are a secondary command buffer and inheriting, update the items we should inherit.
2942 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
2943 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
locke-lunargaecf2152020-05-12 17:15:41 -06002944 cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
locke-lunargd556cc32019-09-17 01:21:23 -06002945 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
locke-lunargfc78e932020-11-19 17:06:24 -07002946
locke-lunargaecf2152020-05-12 17:15:41 -06002947 if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
2948 cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
locke-lunargfc78e932020-11-19 17:06:24 -07002949 cb_state->active_subpasses = nullptr;
2950 cb_state->active_attachments = nullptr;
2951
2952 if (cb_state->activeFramebuffer) {
2953 cb_state->framebuffers.insert(cb_state->activeFramebuffer);
2954
2955 // Set cb_state->active_subpasses
2956 cb_state->active_subpasses =
2957 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
2958 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
2959 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
2960
2961 // Set cb_state->active_attachments & cb_state->attachments_view_states
2962 cb_state->active_attachments =
2963 std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
2964 UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, nullptr);
2965
2966 // Connect this framebuffer and its children to this cmdBuffer
Jeremy Gebben5570abe2021-05-16 18:35:13 -06002967 if (!disabled[command_buffer_state]) {
2968 cb_state->AddChild(cb_state->activeFramebuffer.get());
2969 }
locke-lunargfc78e932020-11-19 17:06:24 -07002970 }
locke-lunargaecf2152020-05-12 17:15:41 -06002971 }
David Zhao Akeley44139b12021-04-26 16:16:13 -07002972
2973 // Check for VkCommandBufferInheritanceViewportScissorInfoNV (VK_NV_inherited_viewport_scissor)
2974 auto p_inherited_viewport_scissor_info =
2975 LvlFindInChain<VkCommandBufferInheritanceViewportScissorInfoNV>(cb_state->beginInfo.pInheritanceInfo->pNext);
2976 if (p_inherited_viewport_scissor_info != nullptr && p_inherited_viewport_scissor_info->viewportScissor2D) {
2977 auto pViewportDepths = p_inherited_viewport_scissor_info->pViewportDepths;
2978 cb_state->inheritedViewportDepths.assign(
2979 pViewportDepths, pViewportDepths + p_inherited_viewport_scissor_info->viewportDepthCount);
2980 }
locke-lunargd556cc32019-09-17 01:21:23 -06002981 }
2982 }
2983
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07002984 auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002985 if (chained_device_group_struct) {
2986 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
2987 } else {
2988 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
2989 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002990
2991 cb_state->performance_lock_acquired = performance_lock_acquired;
locke-lunargd556cc32019-09-17 01:21:23 -06002992}
2993
2994void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2995 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2996 if (!cb_state) return;
2997    // Cached validation is tied to a particular recording of a particular command buffer.
John Zulauf79f06582021-02-27 18:38:39 -07002998 for (auto *descriptor_set : cb_state->validated_descriptor_sets) {
locke-lunargd556cc32019-09-17 01:21:23 -06002999 descriptor_set->ClearCachedValidation(cb_state);
3000 }
3001 cb_state->validated_descriptor_sets.clear();
3002 if (VK_SUCCESS == result) {
3003 cb_state->state = CB_RECORDED;
3004 }
3005}
3006
3007void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
3008 VkResult result) {
3009 if (VK_SUCCESS == result) {
3010 ResetCommandBufferState(commandBuffer);
3011 }
3012}
3013
3014CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
3015 // initially assume everything is static state
3016 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
3017
3018 if (ds) {
3019 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
locke-lunarg4189aa22020-10-21 00:23:48 -06003020 flags &= ~ConvertToCBStatusFlagBits(ds->pDynamicStates[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003021 }
3022 }
locke-lunargd556cc32019-09-17 01:21:23 -06003023 return flags;
3024}
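
// A short sketch of the mask construction above (the dynamic-state array is hypothetical):
// starting from CBSTATUS_ALL_STATE_SET, each declared dynamic state clears its bit, so the
// returned mask is exactly the set of states the pipeline fixes statically.
//
//     VkDynamicState dynamic_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
//     auto ds_ci = LvlInitStruct<VkPipelineDynamicStateCreateInfo>();
//     ds_ci.dynamicStateCount = 2;
//     ds_ci.pDynamicStates = dynamic_states;
//     CBStatusFlags static_mask = MakeStaticStateMask(&ds_ci);
//     assert((static_mask & (CBSTATUS_VIEWPORT_SET | CBSTATUS_SCISSOR_SET)) == 0);  // these stay dynamic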
3025
3026// Validation cache:
3027// CV is the bottommost implementor of this extension. Don't pass calls down.
3028// utility function to set collective state for pipeline
3029void SetPipelineState(PIPELINE_STATE *pPipe) {
3030 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
3031 if (pPipe->graphicsPipelineCI.pColorBlendState) {
3032 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
3033 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
3034 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3035 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3036 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3037 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3038 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3039 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3040 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3041 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
3042 pPipe->blendConstantsEnabled = true;
3043 }
3044 }
3045 }
3046 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07003047 // Check if sample location is enabled
3048 if (pPipe->graphicsPipelineCI.pMultisampleState) {
3049 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07003050 LvlFindInChain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
sfricke-samsung8f658d42020-05-03 20:12:24 -07003051 if (sample_location_state != nullptr) {
3052 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
3053 }
3054 }
locke-lunargd556cc32019-09-17 01:21:23 -06003055}
3056
locke-lunargb8be8222020-10-20 00:34:37 -06003057void UpdateSamplerDescriptorsUsedByImage(LAST_BOUND_STATE &last_bound_state) {
3058 if (!last_bound_state.pipeline_state) return;
3059 if (last_bound_state.per_set.empty()) return;
3060
3061 for (auto &slot : last_bound_state.pipeline_state->active_slots) {
3062 for (auto &req : slot.second) {
3063 for (auto &samplers : req.second.samplers_used_by_image) {
3064 for (auto &sampler : samplers) {
3065 if (sampler.first.sampler_slot.first < last_bound_state.per_set.size() &&
3066 last_bound_state.per_set[sampler.first.sampler_slot.first].bound_descriptor_set) {
3067 sampler.second = last_bound_state.per_set[sampler.first.sampler_slot.first]
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003068 .bound_descriptor_set->GetDescriptorFromBinding(sampler.first.sampler_slot.second,
3069 sampler.first.sampler_index);
locke-lunargb8be8222020-10-20 00:34:37 -06003070 }
3071 }
3072 }
3073 }
3074 }
3075}
3076
locke-lunargd556cc32019-09-17 01:21:23 -06003077void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
3078 VkPipeline pipeline) {
3079 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3080 assert(cb_state);
3081
3082 auto pipe_state = GetPipelineState(pipeline);
3083 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003084 bool rasterization_enabled = VK_FALSE == pipe_state->graphicsPipelineCI.ptr()->pRasterizationState->rasterizerDiscardEnable;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003085 const auto* viewport_state = pipe_state->graphicsPipelineCI.ptr()->pViewportState;
3086 const auto* dynamic_state = pipe_state->graphicsPipelineCI.ptr()->pDynamicState;
locke-lunargd556cc32019-09-17 01:21:23 -06003087 cb_state->status &= ~cb_state->static_status;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003088 cb_state->static_status = MakeStaticStateMask(dynamic_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003089 cb_state->status |= cb_state->static_status;
locke-lunarg4189aa22020-10-21 00:23:48 -06003090 cb_state->dynamic_status = CBSTATUS_ALL_STATE_SET & (~cb_state->static_status);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003091
3092 // Used to calculate CMD_BUFFER_STATE::usedViewportScissorCount upon draw command with this graphics pipeline.
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003093        // If rasterization is disabled (no viewport/scissors used), or the actual number of viewports/scissors is dynamic (unknown at
3094 // this time), then these are set to 0 to disable this checking.
David Zhao Akeley44139b12021-04-26 16:16:13 -07003095 auto has_dynamic_viewport_count = cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003096 auto has_dynamic_scissor_count = cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003097 cb_state->pipelineStaticViewportCount =
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003098 has_dynamic_viewport_count || !rasterization_enabled ? 0 : viewport_state->viewportCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003099 cb_state->pipelineStaticScissorCount =
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003100 has_dynamic_scissor_count || !rasterization_enabled ? 0 : viewport_state->scissorCount;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003101
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003102 // Trash dynamic viewport/scissor state if pipeline defines static state and enabled rasterization.
David Zhao Akeley44139b12021-04-26 16:16:13 -07003103 // akeley98 NOTE: There's a bit of an ambiguity in the spec, whether binding such a pipeline overwrites
3104 // the entire viewport (scissor) array, or only the subsection defined by the viewport (scissor) count.
3105 // I am taking the latter interpretation based on the implementation details of NVIDIA's Vulkan driver.
David Zhao Akeley44139b12021-04-26 16:16:13 -07003106 if (!has_dynamic_viewport_count) {
3107 cb_state->trashedViewportCount = true;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003108 if (rasterization_enabled && (cb_state->static_status & CBSTATUS_VIEWPORT_SET)) {
David Zhao Akeley44139b12021-04-26 16:16:13 -07003109 cb_state->trashedViewportMask |= (uint32_t(1) << viewport_state->viewportCount) - 1u;
3110 // should become = ~uint32_t(0) if the other interpretation is correct.
3111 }
3112 }
3113 if (!has_dynamic_scissor_count) {
3114 cb_state->trashedScissorCount = true;
David Zhao Akeley5f40eb72021-05-04 21:56:14 -07003115 if (rasterization_enabled && (cb_state->static_status & CBSTATUS_SCISSOR_SET)) {
David Zhao Akeley44139b12021-04-26 16:16:13 -07003116 cb_state->trashedScissorMask |= (uint32_t(1) << viewport_state->scissorCount) - 1u;
3117 // should become = ~uint32_t(0) if the other interpretation is correct.
3118 }
3119 }
locke-lunargd556cc32019-09-17 01:21:23 -06003120 }
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003121 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3122 cb_state->lastBound[lv_bind_point].pipeline_state = pipe_state;
locke-lunargd556cc32019-09-17 01:21:23 -06003123 SetPipelineState(pipe_state);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003124 if (!disabled[command_buffer_state]) {
3125 cb_state->AddChild(pipe_state);
3126 }
locke-lunargb8be8222020-10-20 00:34:37 -06003127 for (auto &slot : pipe_state->active_slots) {
3128 for (auto &req : slot.second) {
3129 for (auto &sampler : req.second.samplers_used_by_image) {
3130 for (auto &des : sampler) {
3131 des.second = nullptr;
3132 }
3133 }
3134 }
3135 }
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003136 UpdateSamplerDescriptorsUsedByImage(cb_state->lastBound[lv_bind_point]);
locke-lunargd556cc32019-09-17 01:21:23 -06003137}
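
// A worked example of the trash-mask arithmetic above (the counts are hypothetical): binding a
// pipeline with static viewport state and viewportCount == 3 marks viewports 0..2 as trashed
// (mask 0b0111 under the "only the counted subsection" interpretation). A later
// vkCmdSetViewport(firstViewport = 1, viewportCount = 2) computes
// bits = ((1u << 2) - 1u) << 1 = 0b0110, sets those bits in viewportMask, and clears them from
// trashedViewportMask, leaving only viewport 0 still considered trashed.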
3138
3139void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3140 uint32_t viewportCount, const VkViewport *pViewports) {
3141 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003142 uint32_t bits = ((1u << viewportCount) - 1u) << firstViewport;
3143 cb_state->viewportMask |= bits;
3144 cb_state->trashedViewportMask &= ~bits;
locke-lunargd556cc32019-09-17 01:21:23 -06003145 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003146 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
David Zhao Akeley44139b12021-04-26 16:16:13 -07003147
3148 cb_state->dynamicViewports.resize(std::max(size_t(firstViewport + viewportCount), cb_state->dynamicViewports.size()));
3149 for (size_t i = 0; i < viewportCount; ++i) {
3150 cb_state->dynamicViewports[firstViewport + i] = pViewports[i];
3151 }
locke-lunargd556cc32019-09-17 01:21:23 -06003152}
3153
3154void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3155 uint32_t exclusiveScissorCount,
3156 const VkRect2D *pExclusiveScissors) {
3157 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3158 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3159 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3160 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003161 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003162}
3163
3164void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3165 VkImageLayout imageLayout) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003166 if (disabled[command_buffer_state]) return;
3167
locke-lunargd556cc32019-09-17 01:21:23 -06003168 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3169
3170 if (imageView != VK_NULL_HANDLE) {
3171 auto view_state = GetImageViewState(imageView);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003172 cb_state->AddChild(view_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003173 }
3174}
3175
3176void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3177 uint32_t viewportCount,
3178 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3179 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3180 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3181 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3182 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003183 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003184}
3185
3186void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3187 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3188 const VkAllocationCallbacks *pAllocator,
3189 VkAccelerationStructureNV *pAccelerationStructure,
3190 VkResult result) {
3191 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003192 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003193
3194 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003195 auto as_memory_requirements_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003196 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003197 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003198 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3199
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003200 auto scratch_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003201 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003202 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003203 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3204 &as_state->build_scratch_memory_requirements);
3205
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06003206 auto update_memory_req_info = LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
locke-lunargd556cc32019-09-17 01:21:23 -06003207 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06003208 update_memory_req_info.accelerationStructure = as_state->acceleration_structure();
locke-lunargd556cc32019-09-17 01:21:23 -06003209 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3210 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003211 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003212 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3213}
3214
Jeff Bolz95176d02020-04-01 00:36:16 -05003215void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3216 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3217 const VkAllocationCallbacks *pAllocator,
3218 VkAccelerationStructureKHR *pAccelerationStructure,
3219 VkResult result) {
3220 if (VK_SUCCESS != result) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003221 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE_KHR>(*pAccelerationStructure, pCreateInfo);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003222 as_state->allocator = pAllocator;
sourav parmarcd5fb182020-07-17 12:58:44 -07003223 accelerationStructureMap_khr[*pAccelerationStructure] = std::move(as_state);
Jeff Bolz95176d02020-04-01 00:36:16 -05003224}
3225
sourav parmarcd5fb182020-07-17 12:58:44 -07003226void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresKHR(
3227 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3228 const VkAccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos) {
3229 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3230 if (cb_state == nullptr) {
3231 return;
3232 }
3233 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003234 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003235 if (dst_as_state != nullptr) {
3236 dst_as_state->built = true;
3237 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003238 if (!disabled[command_buffer_state]) {
3239 cb_state->AddChild(dst_as_state);
3240 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003241 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003242 if (!disabled[command_buffer_state]) {
3243 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3244 if (src_as_state != nullptr) {
3245 cb_state->AddChild(src_as_state);
3246 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003247 }
3248 }
3249 cb_state->hasBuildAccelerationStructureCmd = true;
3250}
3251
3252void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructuresIndirectKHR(
3253 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
3254 const VkDeviceAddress *pIndirectDeviceAddresses, const uint32_t *pIndirectStrides,
3255 const uint32_t *const *ppMaxPrimitiveCounts) {
3256 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3257 if (cb_state == nullptr) {
3258 return;
3259 }
3260 for (uint32_t i = 0; i < infoCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003261 auto *dst_as_state = GetAccelerationStructureStateKHR(pInfos[i].dstAccelerationStructure);
sourav parmarcd5fb182020-07-17 12:58:44 -07003262 if (dst_as_state != nullptr) {
3263 dst_as_state->built = true;
3264 dst_as_state->build_info_khr.initialize(&pInfos[i]);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003265 if (!disabled[command_buffer_state]) {
3266 cb_state->AddChild(dst_as_state);
3267 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003268 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003269 if (!disabled[command_buffer_state]) {
3270 auto *src_as_state = GetAccelerationStructureStateKHR(pInfos[i].srcAccelerationStructure);
3271 if (src_as_state != nullptr) {
3272 cb_state->AddChild(src_as_state);
3273 }
sourav parmarcd5fb182020-07-17 12:58:44 -07003274 }
3275 }
3276 cb_state->hasBuildAccelerationStructureCmd = true;
3277}
locke-lunargd556cc32019-09-17 01:21:23 -06003278void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
Mike Schuchardt2df08912020-12-15 16:28:09 -08003279 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2 *pMemoryRequirements) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003280 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(pInfo->accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003281 if (as_state != nullptr) {
3282 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3283 as_state->memory_requirements = *pMemoryRequirements;
3284 as_state->memory_requirements_checked = true;
3285 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3286 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3287 as_state->build_scratch_memory_requirements_checked = true;
3288 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3289 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3290 as_state->update_scratch_memory_requirements_checked = true;
3291 }
3292 }
3293}
3294
sourav parmarcd5fb182020-07-17 12:58:44 -07003295void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3296 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06003297 if (VK_SUCCESS != result) return;
3298 for (uint32_t i = 0; i < bindInfoCount; i++) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003299 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003300
sourav parmarcd5fb182020-07-17 12:58:44 -07003301 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(info.accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003302 if (as_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06003303 // Track objects tied to memory
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003304 auto mem_state = GetDevMemShared(info.memory);
3305 if (mem_state) {
3306 as_state->SetMemBinding(mem_state, info.memoryOffset);
3307 }
locke-lunargd556cc32019-09-17 01:21:23 -06003308
3309 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003310 // XXX TODO: Query device address for KHR extension
sourav parmarcd5fb182020-07-17 12:58:44 -07003311 if (enabled[gpu_validation]) {
locke-lunargd556cc32019-09-17 01:21:23 -06003312 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3313 }
3314 }
3315 }
3316}
3317
3318void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3319 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3320 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3321 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3322 if (cb_state == nullptr) {
3323 return;
3324 }
3325
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003326 auto *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003327 if (dst_as_state != nullptr) {
3328 dst_as_state->built = true;
3329 dst_as_state->build_info.initialize(pInfo);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003330 if (!disabled[command_buffer_state]) {
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003331 cb_state->AddChild(dst_as_state);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003332 }
locke-lunargd556cc32019-09-17 01:21:23 -06003333 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003334 if (!disabled[command_buffer_state]) {
3335 auto *src_as_state = GetAccelerationStructureStateNV(src);
3336 if (src_as_state != nullptr) {
3337 cb_state->AddChild(src_as_state);
3338 }
locke-lunargd556cc32019-09-17 01:21:23 -06003339 }
3340 cb_state->hasBuildAccelerationStructureCmd = true;
3341}
3342
3343void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3344 VkAccelerationStructureNV dst,
3345 VkAccelerationStructureNV src,
3346 VkCopyAccelerationStructureModeNV mode) {
3347 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3348 if (cb_state) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003349 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureStateNV(src);
3350 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureStateNV(dst);
locke-lunargd556cc32019-09-17 01:21:23 -06003351 if (dst_as_state != nullptr && src_as_state != nullptr) {
3352 dst_as_state->built = true;
3353 dst_as_state->build_info = src_as_state->build_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003354 if (!disabled[command_buffer_state]) {
3355 cb_state->AddChild(dst_as_state);
3356 cb_state->AddChild(src_as_state);
3357 }
locke-lunargd556cc32019-09-17 01:21:23 -06003358 }
3359 }
3360}
3361
Jeff Bolz95176d02020-04-01 00:36:16 -05003362void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3363 VkAccelerationStructureKHR accelerationStructure,
3364 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003365 if (!accelerationStructure) return;
sourav parmarcd5fb182020-07-17 12:58:44 -07003366 auto *as_state = GetAccelerationStructureStateKHR(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003367 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003368 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003369 accelerationStructureMap_khr.erase(accelerationStructure);
locke-lunargd556cc32019-09-17 01:21:23 -06003370 }
3371}
3372
Jeff Bolz95176d02020-04-01 00:36:16 -05003373void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3374 VkAccelerationStructureNV accelerationStructure,
3375 const VkAllocationCallbacks *pAllocator) {
sourav parmarcd5fb182020-07-17 12:58:44 -07003376 if (!accelerationStructure) return;
3377 auto *as_state = GetAccelerationStructureStateNV(accelerationStructure);
3378 if (as_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003379 as_state->Destroy();
sourav parmarcd5fb182020-07-17 12:58:44 -07003380 accelerationStructureMap.erase(accelerationStructure);
3381 }
Jeff Bolz95176d02020-04-01 00:36:16 -05003382}
3383
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003384void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3385 uint32_t viewportCount,
3386 const VkViewportWScalingNV *pViewportWScalings) {
3387 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3388 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003389 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003390}
3391
locke-lunargd556cc32019-09-17 01:21:23 -06003392void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3393 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3394 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003395 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003396}
3397
3398void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3399 uint16_t lineStipplePattern) {
3400 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3401 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003402 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003403}
3404
3405void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3406 float depthBiasClamp, float depthBiasSlopeFactor) {
3407 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3408 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003409 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003410}
3411
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003412void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3413 const VkRect2D *pScissors) {
3414 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
David Zhao Akeley44139b12021-04-26 16:16:13 -07003415 uint32_t bits = ((1u << scissorCount) - 1u) << firstScissor;
3416 cb_state->scissorMask |= bits;
3417 cb_state->trashedScissorMask &= ~bits;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003418 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003419 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003420}
3421
locke-lunargd556cc32019-09-17 01:21:23 -06003422void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3423 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3424 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003425 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003426}
3427
3428void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3429 float maxDepthBounds) {
3430 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3431 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003432 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003433}
3434
3435void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3436 uint32_t compareMask) {
3437 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3438 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003439 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003440}
3441
3442void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3443 uint32_t writeMask) {
3444 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3445 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003446 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003447}
3448
3449void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3450 uint32_t reference) {
3451 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3452 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003453 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003454}
3455
3456// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
3457// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
3458// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
3459void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
3460 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
3461 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
3462 cvdescriptorset::DescriptorSet *push_descriptor_set,
3463 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
3464 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
3465 // Defensive
3466 assert(pipeline_layout);
3467 if (!pipeline_layout) return;
3468
3469 uint32_t required_size = first_set + set_count;
3470 const uint32_t last_binding_index = required_size - 1;
3471 assert(last_binding_index < pipeline_layout->compat_for_set.size());
3472
3473 // Some useful shorthand
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003474 const auto lv_bind_point = ConvertToLvlBindPoint(pipeline_bind_point);
3475 auto &last_bound = cb_state->lastBound[lv_bind_point];
locke-lunargd556cc32019-09-17 01:21:23 -06003476 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
3477 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
3478
3479 // We need this three times in this function, but nowhere else
3480 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
3481 if (ds && ds->IsPushDescriptor()) {
3482 assert(ds == last_bound.push_descriptor_set.get());
3483 last_bound.push_descriptor_set = nullptr;
3484 return true;
3485 }
3486 return false;
3487 };
3488
3489    // Clean up the "disturbed" sets before and after the range to be set
3490 if (required_size < current_size) {
3491 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
3492            // We're disturbing those after last; we'll shrink below, but first we need to check for and clean up the push_descriptor
3493 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
3494 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
3495 }
3496 } else {
3497 // We're not disturbing past last, so leave the upper binding data alone.
3498 required_size = current_size;
3499 }
3500 }
3501
3502 // We resize if we need more set entries or if those past "last" are disturbed
3503 if (required_size != current_size) {
3504 last_bound.per_set.resize(required_size);
3505 }
3506
3507 // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
3508 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
3509 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
3510 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3511 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
3512 last_bound.per_set[set_idx].dynamicOffsets.clear();
3513 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
3514 }
3515 }
3516
3517 // Now update the bound sets with the input sets
3518 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
3519 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
3520 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
3521 cvdescriptorset::DescriptorSet *descriptor_set =
3522 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
3523
3524 // Record binding (or push)
3525 if (descriptor_set != last_bound.push_descriptor_set.get()) {
3526 // Only cleanup the push descriptors if they aren't the currently used set.
3527 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3528 }
3529 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
3530 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
3531
3532 if (descriptor_set) {
3533 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
3534 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
3535 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
3536 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
3537 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
3538 input_dynamic_offsets = end_offset;
3539 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
3540 } else {
3541 last_bound.per_set[set_idx].dynamicOffsets.clear();
3542 }
3543 if (!descriptor_set->IsPushDescriptor()) {
3544 // Can't cache validation of push_descriptors
3545 cb_state->validated_descriptor_sets.insert(descriptor_set);
3546 }
3547 }
3548 }
3549}
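
// A sketch of the disturbance rules implemented above (the layouts and indices are hypothetical):
// suppose sets 0..2 were bound through layout L1 and the app then records
// vkCmdBindDescriptorSets(..., layout = L2, firstSet = 1, descriptorSetCount = 1, ...).
//   - Below the range: if L2's compat_for_set[0] differs from the id recorded for set 0,
//     set 0 is invalidated (bound_descriptor_set cleared, dynamic offsets dropped).
//   - Above the range: if the recorded compat id at the last updated index (set 1) still matches
//     L2's, set 2 is left alone ("not disturbing past last"); otherwise per_set shrinks and
//     set 2 is dropped.
//   - Set 1 itself is rebound and its compat id is refreshed from L2.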
3550
3551// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3552void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3553 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3554 uint32_t firstSet, uint32_t setCount,
3555 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3556 const uint32_t *pDynamicOffsets) {
3557 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3558 auto pipeline_layout = GetPipelineLayout(layout);
3559
3560 // Resize binding arrays
3561 uint32_t last_set_index = firstSet + setCount - 1;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003562 const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
3563 if (last_set_index >= cb_state->lastBound[lv_bind_point].per_set.size()) {
3564 cb_state->lastBound[lv_bind_point].per_set.resize(last_set_index + 1);
locke-lunargd556cc32019-09-17 01:21:23 -06003565 }
3566
3567 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3568 dynamicOffsetCount, pDynamicOffsets);
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003569 cb_state->lastBound[lv_bind_point].pipeline_layout = layout;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06003570 UpdateSamplerDescriptorsUsedByImage(cb_state->lastBound[lv_bind_point]);
locke-lunargd556cc32019-09-17 01:21:23 -06003571}
3572
Jeremy Gebben5570abe2021-05-16 18:35:13 -06003573void LAST_BOUND_STATE::UnbindAndResetPushDescriptorSet(CMD_BUFFER_STATE *cb_state, cvdescriptorset::DescriptorSet *ds) {
3574 if (push_descriptor_set) {
3575 for (auto &ps: per_set) {
3576 if (ps.bound_descriptor_set == push_descriptor_set.get()) {
3577 cb_state->RemoveChild(ps.bound_descriptor_set);
3578 ps.bound_descriptor_set = nullptr;
3579 }
3580 }
3581 }
3582 cb_state->AddChild(ds);
3583 push_descriptor_set.reset(ds);
3584}
3585void LAST_BOUND_STATE::Reset() {
3586 pipeline_state = nullptr;
3587 pipeline_layout = VK_NULL_HANDLE;
3588 if (push_descriptor_set) {
3589 push_descriptor_set->Reset();
3590 }
3591 push_descriptor_set = nullptr;
3592 per_set.clear();
3593}
3594
void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor()) {
        return;
    }

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto &dsl = pipeline_layout->set_layouts[set];
    const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
    auto &last_bound = cb_state->lastBound[lv_bind_point];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set, clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(cb_state, new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
                                                                  VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                  uint32_t set, uint32_t descriptorWriteCount,
                                                                  const VkWriteDescriptorSet *pDescriptorWrites) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
}

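// Mirror the raw push constant bytes into the command buffer state and mark the written byte range as updated for each
// stage bit in stageFlags. Illustrative call that would be recorded here (sketch only, names are placeholders):
//   float color[4] = {1.0f, 0.0f, 0.0f, 1.0f};
//   vkCmdPushConstants(cmd, layout, VK_SHADER_STAGE_FRAGMENT_BIT, /*offset*/ 0, sizeof(color), color);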
void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                                                            VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                                                            const void *pValues) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state != nullptr) {
        ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);

        auto &push_constant_data = cb_state->push_constant_data;
        assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
        std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
        cb_state->push_constant_pipeline_layout_set = layout;

        auto flags = stageFlags;
        uint32_t bit_shift = 0;
        while (flags) {
            if (flags & 1) {
                VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
                const auto it = cb_state->push_constant_data_update.find(flag);

                if (it != cb_state->push_constant_data_update.end()) {
                    std::memset(it->second.data() + offset, PC_Byte_Updated, static_cast<std::size_t>(size));
                }
            }
            flags = flags >> 1;
            ++bit_shift;
        }
    }
}

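// Record the index buffer binding (buffer, offset, index type) and, unless command_buffer_state tracking is disabled,
// parent the buffer to the command buffer so the binding is invalidated if the buffer is destroyed.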
void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                             VkIndexType indexType) {
    auto cb_state = GetCBState(commandBuffer);

    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->index_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(buffer);
    cb_state->index_buffer_binding.size = cb_state->index_buffer_binding.buffer_state->createInfo.size;
    cb_state->index_buffer_binding.offset = offset;
    cb_state->index_buffer_binding.index_type = indexType;
    // Add binding for this index buffer to this commandbuffer
    if (!disabled[command_buffer_state]) {
        cb_state->AddChild(cb_state->index_buffer_binding.buffer_state.get());
    }
}

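// Grow the vertex buffer binding table as needed and record each (buffer, offset) pair. Size and stride default to
// VK_WHOLE_SIZE and 0 here; only the extended bind point (vkCmdBindVertexBuffers2EXT) supplies explicit values.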
void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                               uint32_t bindingCount, const VkBuffer *pBuffers,
                                                               const VkDeviceSize *pOffsets) {
    auto cb_state = GetCBState(commandBuffer);

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
        vertex_buffer_binding.offset = pOffsets[i];
        vertex_buffer_binding.size = VK_WHOLE_SIZE;
        vertex_buffer_binding.stride = 0;
        // Add binding for this vertex buffer to this commandbuffer
        if (pBuffers[i] && !disabled[command_buffer_state]) {
            cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
        }
    }
}

void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
                                                           VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    if (disabled[command_buffer_state]) return;

    auto cb_state = GetCBState(commandBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer and cmd buffer
    if (cb_state && dst_buffer_state) {
        cb_state->AddChild(dst_buffer_state);
    }
}

static bool SetEventStageMask(VkEvent event, VkPipelineStageFlags2KHR stageMask,
                              EventToStageMap *localEventToStageMap) {
    (*localEventToStageMap)[event] = stageMask;
    return false;
}

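// Event state is resolved lazily: instead of mutating global event state while recording, the callback pushed onto
// eventUpdates applies the stage mask to a local EventToStageMap when the command buffer is later submitted.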
void ValidationStateTracker::RecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!disabled[command_buffer_state]) {
        auto event_state = GetEventState(event);
        if (event_state) {
            cb_state->AddChild(event_state);
        }
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}

Jeremy Gebben74aa7622020-12-15 11:18:00 -07003736void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3737 VkPipelineStageFlags stageMask) {
3738 RecordCmdSetEvent(commandBuffer, event, stageMask);
3739}
3740
3741void ValidationStateTracker::PreCallRecordCmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
3742 const VkDependencyInfoKHR *pDependencyInfo) {
3743 auto stage_masks = sync_utils::GetGlobalStageMasks(*pDependencyInfo);
3744
3745 RecordCmdSetEvent(commandBuffer, event, stage_masks.src);
3746}
3747
3748void ValidationStateTracker::RecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3749 VkPipelineStageFlags2KHR stageMask) {
locke-lunargd556cc32019-09-17 01:21:23 -06003750 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003751 if (!disabled[command_buffer_state]) {
3752 auto event_state = GetEventState(event);
3753 if (event_state) {
3754 cb_state->AddChild(event_state);
3755 }
locke-lunargd556cc32019-09-17 01:21:23 -06003756 }
3757 cb_state->events.push_back(event);
3758 if (!cb_state->waitedEvents.count(event)) {
3759 cb_state->writeEventsBeforeWait.push_back(event);
3760 }
3761
3762 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003763 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003764 return SetEventStageMask(event, VkPipelineStageFlags2KHR(0), localEventToStageMap);
Jeff Bolz310775c2019-10-09 00:46:33 -05003765 });
locke-lunargd556cc32019-09-17 01:21:23 -06003766}
3767
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003768void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3769 VkPipelineStageFlags stageMask) {
3770 RecordCmdResetEvent(commandBuffer, event, stageMask);
3771}
3772
3773void ValidationStateTracker::PreCallRecordCmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event,
3774 VkPipelineStageFlags2KHR stageMask) {
3775 RecordCmdResetEvent(commandBuffer, event, stageMask);
3776}
3777
3778void ValidationStateTracker::RecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents) {
locke-lunargd556cc32019-09-17 01:21:23 -06003779 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3780 for (uint32_t i = 0; i < eventCount; ++i) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003781 if (!disabled[command_buffer_state]) {
3782 auto event_state = GetEventState(pEvents[i]);
3783 if (event_state) {
3784 cb_state->AddChild(event_state);
3785 }
locke-lunargd556cc32019-09-17 01:21:23 -06003786 }
3787 cb_state->waitedEvents.insert(pEvents[i]);
3788 cb_state->events.push_back(pEvents[i]);
3789 }
3790}
3791
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003792void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
3793 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
3794 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3795 uint32_t bufferMemoryBarrierCount,
3796 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3797 uint32_t imageMemoryBarrierCount,
3798 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3799 RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
3800}
3801
3802void ValidationStateTracker::PreCallRecordCmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount,
3803 const VkEvent *pEvents, const VkDependencyInfoKHR *pDependencyInfos) {
3804 RecordCmdWaitEvents(commandBuffer, eventCount, pEvents);
3805}
3806
Jeff Bolz310775c2019-10-09 00:46:33 -05003807bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
3808 (*localQueryToStateMap)[object] = value;
3809 return false;
3810}
3811
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003812bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
3813 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05003814 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003815 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05003816 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06003817 }
3818 return false;
3819}
3820
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003821QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
3822 uint32_t perfPass) const {
3823 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06003824
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003825 auto iter = localQueryToStateMap->find(query);
3826 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05003827
Jeff Bolz310775c2019-10-09 00:46:33 -05003828 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06003829}
3830
3831void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003832 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003833 cb_state->activeQueries.insert(query_obj);
3834 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003835 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3836 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3837 QueryMap *localQueryToStateMap) {
3838 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
3839 return false;
3840 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003841 if (!disabled[command_buffer_state]) {
3842 auto pool_state = GetQueryPoolState(query_obj.pool);
3843 cb_state->AddChild(pool_state);
3844 }
locke-lunargd556cc32019-09-17 01:21:23 -06003845}
3846
3847void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3848 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003849 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003850 QueryObject query = {queryPool, slot};
3851 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3852 RecordCmdBeginQuery(cb_state, query);
3853}
3854
3855void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003856 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003857 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003858 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3859 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3860 QueryMap *localQueryToStateMap) {
3861 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3862 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003863 if (!disabled[command_buffer_state]) {
3864 auto pool_state = GetQueryPoolState(query_obj.pool);
3865 cb_state->AddChild(pool_state);
3866 }
locke-lunargd556cc32019-09-17 01:21:23 -06003867}
3868
3869void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003870 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003871 QueryObject query_obj = {queryPool, slot};
3872 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3873 RecordCmdEndQuery(cb_state, query_obj);
3874}
3875
3876void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3877 uint32_t firstQuery, uint32_t queryCount) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003878 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003879 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3880
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02003881 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
3882 QueryObject query = {queryPool, slot};
3883 cb_state->resetQueries.insert(query);
3884 }
3885
Jeff Bolz310775c2019-10-09 00:46:33 -05003886 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003887 bool do_validate, VkQueryPool &firstPerfQueryPool,
3888 uint32_t perfQueryPass,
3889 QueryMap *localQueryToStateMap) {
3890 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06003891 });
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003892 if (!disabled[command_buffer_state]) {
3893 auto pool_state = GetQueryPoolState(queryPool);
3894 cb_state->AddChild(pool_state);
3895 }
locke-lunargd556cc32019-09-17 01:21:23 -06003896}
3897
3898void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3899 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3900 VkDeviceSize dstOffset, VkDeviceSize stride,
3901 VkQueryResultFlags flags) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003902 if (disabled[query_validation] || disabled[command_buffer_state]) return;
3903
locke-lunargd556cc32019-09-17 01:21:23 -06003904 auto cb_state = GetCBState(commandBuffer);
3905 auto dst_buff_state = GetBufferState(dstBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003906 cb_state->AddChild(dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003907 auto pool_state = GetQueryPoolState(queryPool);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003908 cb_state->AddChild(pool_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003909}
3910
3911void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3912 VkQueryPool queryPool, uint32_t slot) {
Jeremy Gebben74aa7622020-12-15 11:18:00 -07003913 PostCallRecordCmdWriteTimestamp2KHR(commandBuffer, pipelineStage, queryPool, slot);
3914}
3915
3916void ValidationStateTracker::PostCallRecordCmdWriteTimestamp2KHR(VkCommandBuffer commandBuffer,
3917 VkPipelineStageFlags2KHR pipelineStage, VkQueryPool queryPool,
3918 uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003919 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003920 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003921 if (!disabled[command_buffer_state]) {
3922 auto pool_state = GetQueryPoolState(queryPool);
3923 cb_state->AddChild(pool_state);
3924 }
locke-lunargd556cc32019-09-17 01:21:23 -06003925 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003926 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
3927 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3928 QueryMap *localQueryToStateMap) {
3929 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3930 });
locke-lunargd556cc32019-09-17 01:21:23 -06003931}
3932
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003933void ValidationStateTracker::PostCallRecordCmdWriteAccelerationStructuresPropertiesKHR(
3934 VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR *pAccelerationStructures,
3935 VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {
3936 if (disabled[query_validation]) return;
3937 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06003938 if (!disabled[command_buffer_state]) {
3939 auto pool_state = GetQueryPoolState(queryPool);
3940 cb_state->AddChild(pool_state);
3941 }
Marijn Suijten6750fdc2020-12-30 22:06:42 +01003942 cb_state->queryUpdates.emplace_back(
3943 [queryPool, firstQuery, accelerationStructureCount](const ValidationStateTracker *device_data, bool do_validate,
3944 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3945 QueryMap *localQueryToStateMap) {
3946 return SetQueryStateMulti(queryPool, firstQuery, accelerationStructureCount, perfQueryPass, QUERYSTATE_ENDED,
3947 localQueryToStateMap);
3948 });
3949}
3950
locke-lunargd556cc32019-09-17 01:21:23 -06003951void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3952 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3953 VkResult result) {
3954 if (VK_SUCCESS != result) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003955
Jeremy Gebben88f58142021-06-01 10:07:52 -06003956 std::vector<std::shared_ptr<IMAGE_VIEW_STATE>> views;
Mike Schuchardt2df08912020-12-15 16:28:09 -08003957 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) == 0) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003958 views.resize(pCreateInfo->attachmentCount);
locke-lunarg1ae57d62020-11-18 10:49:19 -07003959
locke-lunargd556cc32019-09-17 01:21:23 -06003960 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003961 views[i] = GetShared<IMAGE_VIEW_STATE>(pCreateInfo->pAttachments[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003962 }
3963 }
Jeremy Gebben88f58142021-06-01 10:07:52 -06003964
3965 frameBufferMap[*pFramebuffer] = std::make_shared<FRAMEBUFFER_STATE>(
3966 *pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass), std::move(views));
locke-lunargd556cc32019-09-17 01:21:23 -06003967}
3968
locke-lunargd556cc32019-09-17 01:21:23 -06003969void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
3970 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3971 VkResult result) {
3972 if (VK_SUCCESS != result) return;
Jeremy Gebben88f58142021-06-01 10:07:52 -06003973 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003974}
3975
Mike Schuchardt2df08912020-12-15 16:28:09 -08003976void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07003977 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3978 VkResult result) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003979 if (VK_SUCCESS != result) return;
3980
3981 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
Tony-LunarG977448c2019-12-02 14:52:02 -07003982}
3983
Mike Schuchardt2df08912020-12-15 16:28:09 -08003984void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
Tony-LunarG977448c2019-12-02 14:52:02 -07003985 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3986 VkResult result) {
Jeremy Gebben88f58142021-06-01 10:07:52 -06003987 if (VK_SUCCESS != result) return;
3988
3989 renderPassMap[*pRenderPass] = std::make_shared<RENDER_PASS_STATE>(*pRenderPass, pCreateInfo);
Tony-LunarG977448c2019-12-02 14:52:02 -07003990}
3991
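// Common vkCmdBeginRenderPass/2/2KHR bookkeeping: remember the active render pass, framebuffer, subpass index and
// contents, rebuild the per-subpass attachment view lists, and (unless fine-grained tracking is disabled) parent the
// render pass and framebuffer to the command buffer.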
locke-lunargd556cc32019-09-17 01:21:23 -06003992void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
3993 const VkRenderPassBeginInfo *pRenderPassBegin,
3994 const VkSubpassContents contents) {
3995 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunargaecf2152020-05-12 17:15:41 -06003996 auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
3997 auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06003998
3999 if (render_pass_state) {
locke-lunargaecf2152020-05-12 17:15:41 -06004000 cb_state->activeFramebuffer = framebuffer;
locke-lunargd556cc32019-09-17 01:21:23 -06004001 cb_state->activeRenderPass = render_pass_state;
Tony-LunarG61e7c0c2020-03-03 16:09:11 -07004002 cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
locke-lunargd556cc32019-09-17 01:21:23 -06004003 cb_state->activeSubpass = 0;
4004 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07004005
locke-lunargd556cc32019-09-17 01:21:23 -06004006 // Connect this RP to cmdBuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004007 if (!disabled[command_buffer_state]) {
4008 cb_state->AddChild(render_pass_state.get());
4009 }
locke-lunargd556cc32019-09-17 01:21:23 -06004010
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07004011 auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06004012 if (chained_device_group_struct) {
4013 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
4014 } else {
4015 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
4016 }
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03004017
locke-lunargfc78e932020-11-19 17:06:24 -07004018 cb_state->active_subpasses = nullptr;
4019 cb_state->active_attachments = nullptr;
4020
4021 if (framebuffer) {
4022 cb_state->framebuffers.insert(framebuffer);
4023
4024 // Set cb_state->active_subpasses
4025 cb_state->active_subpasses =
4026 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
4027 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
4028 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
4029
4030 // Set cb_state->active_attachments & cb_state->attachments_view_states
4031 cb_state->active_attachments =
4032 std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(framebuffer->createInfo.attachmentCount);
4033 UpdateAttachmentsView(*this, *cb_state, *cb_state->activeFramebuffer, pRenderPassBegin);
4034
4035 // Connect this framebuffer and its children to this cmdBuffer
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004036 cb_state->AddChild(framebuffer.get());
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03004037 }
locke-lunargd556cc32019-09-17 01:21:23 -06004038 }
4039}
4040
4041void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
4042 const VkRenderPassBeginInfo *pRenderPassBegin,
4043 VkSubpassContents contents) {
4044 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
4045}
4046
4047void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
4048 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004049 const VkSubpassBeginInfo *pSubpassBeginInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06004050 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4051}
4052
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06004053void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4054 uint32_t counterBufferCount,
4055 const VkBuffer *pCounterBuffers,
4056 const VkDeviceSize *pCounterBufferOffsets) {
4057 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4058
4059 cb_state->transform_feedback_active = true;
4060}
4061
4062void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4063 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
4064 const VkDeviceSize *pCounterBufferOffsets) {
4065 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4066
4067 cb_state->transform_feedback_active = false;
4068}
4069
Tony-LunarG977448c2019-12-02 14:52:02 -07004070void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
4071 const VkRenderPassBeginInfo *pRenderPassBegin,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004072 const VkSubpassBeginInfo *pSubpassBeginInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004073 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4074}
4075
locke-lunargd556cc32019-09-17 01:21:23 -06004076void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4077 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4078 cb_state->activeSubpass++;
4079 cb_state->activeSubpassContents = contents;
locke-lunargfc78e932020-11-19 17:06:24 -07004080
4081 // Update cb_state->active_subpasses
4082 if (cb_state->activeRenderPass && cb_state->activeFramebuffer) {
4083 cb_state->active_subpasses = nullptr;
4084 cb_state->active_subpasses =
4085 std::make_shared<std::vector<SUBPASS_INFO>>(cb_state->activeFramebuffer->createInfo.attachmentCount);
4086
4087 const auto &subpass = cb_state->activeRenderPass->createInfo.pSubpasses[cb_state->activeSubpass];
4088 UpdateSubpassAttachments(subpass, *cb_state->active_subpasses);
4089 }
locke-lunargd556cc32019-09-17 01:21:23 -06004090}
4091
4092void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4093 RecordCmdNextSubpass(commandBuffer, contents);
4094}
4095
4096void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004097 const VkSubpassBeginInfo *pSubpassBeginInfo,
4098 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06004099 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4100}
4101
Tony-LunarG977448c2019-12-02 14:52:02 -07004102void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004103 const VkSubpassBeginInfo *pSubpassBeginInfo,
4104 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004105 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4106}
4107
locke-lunargd556cc32019-09-17 01:21:23 -06004108void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
4109 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4110 cb_state->activeRenderPass = nullptr;
locke-lunargfc78e932020-11-19 17:06:24 -07004111 cb_state->active_attachments = nullptr;
4112 cb_state->active_subpasses = nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004113 cb_state->activeSubpass = 0;
4114 cb_state->activeFramebuffer = VK_NULL_HANDLE;
4115}
4116
4117void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
4118 RecordCmdEndRenderPassState(commandBuffer);
4119}
4120
4121void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004122 const VkSubpassEndInfo *pSubpassEndInfo) {
locke-lunargd556cc32019-09-17 01:21:23 -06004123 RecordCmdEndRenderPassState(commandBuffer);
4124}
4125
Tony-LunarG977448c2019-12-02 14:52:02 -07004126void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004127 const VkSubpassEndInfo *pSubpassEndInfo) {
Tony-LunarG977448c2019-12-02 14:52:02 -07004128 RecordCmdEndRenderPassState(commandBuffer);
4129}
locke-lunargd556cc32019-09-17 01:21:23 -06004130void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
4131 const VkCommandBuffer *pCommandBuffers) {
4132 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4133
4134 CMD_BUFFER_STATE *sub_cb_state = NULL;
4135 for (uint32_t i = 0; i < commandBuffersCount; i++) {
4136 sub_cb_state = GetCBState(pCommandBuffers[i]);
4137 assert(sub_cb_state);
4138 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
4139 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
4140 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
4141 // from the validation step to the recording step
4142 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
4143 }
4144 }
4145
        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
4150 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
4151 const auto image = sub_layout_map_entry.first;
4152 const auto *image_state = GetImageState(image);
4153 if (!image_state) continue; // Can't set layouts of a dead image
4154
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06004155 auto *cb_subres_map = cb_state->GetImageSubresourceLayoutMap(*image_state);
John Zulauf17708d02021-02-22 11:20:58 -07004156 const auto *sub_cb_subres_map = &sub_layout_map_entry.second;
locke-lunargd556cc32019-09-17 01:21:23 -06004157 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
4158 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
4159 }
4160
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06004161 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer();
locke-lunargd556cc32019-09-17 01:21:23 -06004162 cb_state->linkedCommandBuffers.insert(sub_cb_state);
Jeremy Gebben5570abe2021-05-16 18:35:13 -06004163 cb_state->AddChild(sub_cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004164 for (auto &function : sub_cb_state->queryUpdates) {
4165 cb_state->queryUpdates.push_back(function);
4166 }
4167 for (auto &function : sub_cb_state->queue_submit_functions) {
4168 cb_state->queue_submit_functions.push_back(function);
4169 }
David Zhao Akeley44139b12021-04-26 16:16:13 -07004170
4171 // State is trashed after executing secondary command buffers.
4172 // Importantly, this function runs after CoreChecks::PreCallValidateCmdExecuteCommands.
4173 cb_state->trashedViewportMask = ~uint32_t(0);
4174 cb_state->trashedScissorMask = ~uint32_t(0);
4175 cb_state->trashedViewportCount = true;
4176 cb_state->trashedScissorCount = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004177 }
4178}
4179
4180void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
4181 VkFlags flags, void **ppData, VkResult result) {
4182 if (VK_SUCCESS != result) return;
4183 RecordMappedMemory(mem, offset, size, ppData);
4184}
4185
4186void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4187 auto mem_info = GetDevMemState(mem);
4188 if (mem_info) {
4189 mem_info->mapped_range = MemRange();
4190 mem_info->p_driver_data = nullptr;
4191 }
4192}
4193
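// Shared state update for vkBindImageMemory and vkBindImageMemory2(KHR). Swapchain-backed bindings share a fake
// allocation address per swapchain image index and are recorded as aliases of each other; regular bindings are tied to
// the VkDeviceMemory object's bound-image list and memory range.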
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // An Android special image cannot get VkSubresourceLayout until the image is bound to memory.
        // See: VUID-vkGetImageSubresourceLayout-image-01895
        image_state->fragment_encoder =
            std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
        const auto swapchain_info = LvlFindInChain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto *swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                SWAPCHAIN_IMAGE &swap_image = swapchain->images[swapchain_info->imageIndex];
                if (swap_image.bound_images.empty()) {
                    // If this is the first "binding" of an image to this swapchain index, get a fake allocation
                    image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
                } else {
                    image_state->swapchain_fake_address = (*swap_image.bound_images.cbegin())->swapchain_fake_address;
                }
                swap_image.bound_images.emplace(image_state);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;

                // All images bound to this swapchain and index are aliases
                image_state->AddAliasingImage(swap_image.bound_images);
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemShared(bindInfo.memory);
            if (mem_info) {
                mem_info->bound_images.insert(image_state);
                if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
                    image_state->AddAliasingImage(mem_info->bound_images);
                }
                // Track objects tied to memory
                image_state->SetMemBinding(mem_info, bindInfo.memoryOffset);
            }
        }
    }
}

4234void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
4235 VkDeviceSize memoryOffset, VkResult result) {
4236 if (VK_SUCCESS != result) return;
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004237 auto bind_info = LvlInitStruct<VkBindImageMemoryInfo>();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004238 bind_info.image = image;
4239 bind_info.memory = mem;
4240 bind_info.memoryOffset = memoryOffset;
4241 UpdateBindImageMemoryState(bind_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004242}
4243
4244void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004245 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004246 if (VK_SUCCESS != result) return;
4247 for (uint32_t i = 0; i < bindInfoCount; i++) {
4248 UpdateBindImageMemoryState(pBindInfos[i]);
4249 }
4250}
4251
4252void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004253 const VkBindImageMemoryInfo *pBindInfos, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004254 if (VK_SUCCESS != result) return;
4255 for (uint32_t i = 0; i < bindInfoCount; i++) {
4256 UpdateBindImageMemoryState(pBindInfos[i]);
4257 }
4258}
4259
4260void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
4261 auto event_state = GetEventState(event);
4262 if (event_state) {
4263 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
4264 }
locke-lunargd556cc32019-09-17 01:21:23 -06004265}
4266
4267void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
4268 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
4269 VkResult result) {
4270 if (VK_SUCCESS != result) return;
4271 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
4272 pImportSemaphoreFdInfo->flags);
4273}
4274
4275void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004276 VkExternalSemaphoreHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004277 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004278 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004279 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
4280 semaphore_state->scope = kSyncScopeExternalPermanent;
4281 }
4282}
4283
4284#ifdef VK_USE_PLATFORM_WIN32_KHR
4285void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
4286 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
4287 if (VK_SUCCESS != result) return;
4288 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
4289 pImportSemaphoreWin32HandleInfo->flags);
4290}
4291
4292void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
4293 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4294 HANDLE *pHandle, VkResult result) {
4295 if (VK_SUCCESS != result) return;
4296 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
4297}
4298
4299void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
4300 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
4301 if (VK_SUCCESS != result) return;
4302 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
4303 pImportFenceWin32HandleInfo->flags);
4304}
4305
4306void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
4307 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4308 HANDLE *pHandle, VkResult result) {
4309 if (VK_SUCCESS != result) return;
4310 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
4311}
4312#endif
4313
4314void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
4315 VkResult result) {
4316 if (VK_SUCCESS != result) return;
4317 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
4318}
4319
Mike Schuchardt2df08912020-12-15 16:28:09 -08004320void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type,
4321 VkFenceImportFlags flags) {
locke-lunargd556cc32019-09-17 01:21:23 -06004322 FENCE_STATE *fence_node = GetFenceState(fence);
4323 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004324 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT || flags & VK_FENCE_IMPORT_TEMPORARY_BIT) &&
locke-lunargd556cc32019-09-17 01:21:23 -06004325 fence_node->scope == kSyncScopeInternal) {
4326 fence_node->scope = kSyncScopeExternalTemporary;
4327 } else {
4328 fence_node->scope = kSyncScopeExternalPermanent;
4329 }
4330 }
4331}
4332
4333void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
4334 VkResult result) {
4335 if (VK_SUCCESS != result) return;
4336 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
4337}
4338
Mike Schuchardt2df08912020-12-15 16:28:09 -08004339void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBits handle_type) {
locke-lunargd556cc32019-09-17 01:21:23 -06004340 FENCE_STATE *fence_state = GetFenceState(fence);
4341 if (fence_state) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08004342 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT) {
locke-lunargd556cc32019-09-17 01:21:23 -06004343 // Export with reference transference becomes external
4344 fence_state->scope = kSyncScopeExternalPermanent;
4345 } else if (fence_state->scope == kSyncScopeInternal) {
4346 // Export with copy transference has a side effect of resetting the fence
4347 fence_state->state = FENCE_UNSIGNALED;
4348 }
4349 }
4350}
4351
4352void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
4353 VkResult result) {
4354 if (VK_SUCCESS != result) return;
4355 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
4356}
4357
4358void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
4359 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
4360 if (VK_SUCCESS != result) return;
John Zulaufd5115702021-01-18 12:34:33 -07004361 const auto event = *pEvent;
Jeremy Gebbencbf22862021-03-03 12:01:22 -07004362 eventMap.emplace(event, std::make_shared<EVENT_STATE>(event, pCreateInfo->flags));
locke-lunargd556cc32019-09-17 01:21:23 -06004363}
4364
4365void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
4366 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
4367 SWAPCHAIN_NODE *old_swapchain_state) {
4368 if (VK_SUCCESS == result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004369 auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004370 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
4371 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
4372 swapchain_state->shared_presentable = true;
4373 }
4374 surface_state->swapchain = swapchain_state.get();
4375 swapchainMap[*pSwapchain] = std::move(swapchain_state);
4376 } else {
4377 surface_state->swapchain = nullptr;
4378 }
4379 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
4380 if (old_swapchain_state) {
4381 old_swapchain_state->retired = true;
4382 }
4383 return;
4384}
4385
4386void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
4387 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
4388 VkResult result) {
4389 auto surface_state = GetSurfaceState(pCreateInfo->surface);
4390 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
4391 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
4392}
4393
4394void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
4395 const VkAllocationCallbacks *pAllocator) {
4396 if (!swapchain) return;
4397 auto swapchain_data = GetSwapchainState(swapchain);
4398 if (swapchain_data) {
John Zulauffaa7a522021-03-05 12:22:45 -07004399 for (auto &swapchain_image : swapchain_data->images) {
4400 // TODO: missing validation that the bound images are empty (except for image_state above)
4401 // Clean up the aliases and the bound_images *before* erasing the image_state.
Jeremy Gebben5570abe2021-05-16 18:35:13 -06004402 RemoveAliasingImages(swapchain_image.bound_images);
John Zulauffaa7a522021-03-05 12:22:45 -07004403 swapchain_image.bound_images.clear();
4404
4405 if (swapchain_image.image_state) {
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004406 swapchain_image.image_state->Destroy();
Jeremy Gebben14b0d1a2021-05-15 20:15:41 -06004407 imageMap.erase(swapchain_image.image_state->image());
John Zulauffaa7a522021-03-05 12:22:45 -07004408 swapchain_image.image_state = nullptr;
John Zulauf2d60a452021-03-04 15:12:03 -07004409 }
locke-lunargd556cc32019-09-17 01:21:23 -06004410 }
4411
4412 auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
4413 if (surface_state) {
4414 if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
4415 }
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004416 swapchain_data->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06004417 swapchainMap.erase(swapchain);
4418 }
4419}
4420
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004421void ValidationStateTracker::PostCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
4422 const VkDisplayModeCreateInfoKHR *pCreateInfo,
4423 const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode,
4424 VkResult result) {
4425 if (VK_SUCCESS != result) return;
4426 if (!pMode) return;
Jeremy Gebben5573a8c2021-06-04 08:55:10 -06004427 display_mode_map[*pMode] = std::make_shared<DISPLAY_MODE_STATE>(*pMode, physicalDevice);
sfricke-samsung5c1b7392020-12-13 22:17:15 -08004428}
4429
locke-lunargd556cc32019-09-17 01:21:23 -06004430void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
4431 // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
4432 for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004433 auto semaphore_state = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
4434 if (semaphore_state) {
4435 semaphore_state->signaler.first = VK_NULL_HANDLE;
4436 semaphore_state->signaled = false;
locke-lunargd556cc32019-09-17 01:21:23 -06004437 }
4438 }
4439
4440 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
4441 // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
4442 // confused itself just as much.
4443 auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
4444 if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
4445 // Mark the image as having been released to the WSI
4446 auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
4447 if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
John Zulauffaa7a522021-03-05 12:22:45 -07004448 IMAGE_STATE *image_state = swapchain_data->images[pPresentInfo->pImageIndices[i]].image_state;
locke-lunargd556cc32019-09-17 01:21:23 -06004449 if (image_state) {
4450 image_state->acquired = false;
Jeff Bolz46c0ea02019-10-09 13:06:29 -05004451 if (image_state->shared_presentable) {
4452 image_state->layout_locked = true;
4453 }
locke-lunargd556cc32019-09-17 01:21:23 -06004454 }
4455 }
4456 }
4457 // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
4458 // its semaphore waits) /never/ participate in any completion proof.
4459}
4460
4461void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
4462 const VkSwapchainCreateInfoKHR *pCreateInfos,
4463 const VkAllocationCallbacks *pAllocator,
4464 VkSwapchainKHR *pSwapchains, VkResult result) {
4465 if (pCreateInfos) {
4466 for (uint32_t i = 0; i < swapchainCount; i++) {
4467 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
4468 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
4469 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
4470 }
4471 }
4472}
4473
4474void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4475 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004476 auto fence_state = GetFenceState(fence);
4477 if (fence_state && fence_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004478 // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
4479 // import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004480 fence_state->state = FENCE_INFLIGHT;
4481 fence_state->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
locke-lunargd556cc32019-09-17 01:21:23 -06004482 }
4483
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004484 auto semaphore_state = GetSemaphoreState(semaphore);
4485 if (semaphore_state && semaphore_state->scope == kSyncScopeInternal) {
locke-lunargd556cc32019-09-17 01:21:23 -06004486 // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
4487 // temporary import
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004488 semaphore_state->signaled = true;
4489 semaphore_state->signaler.first = VK_NULL_HANDLE;
locke-lunargd556cc32019-09-17 01:21:23 -06004490 }
4491
4492 // Mark the image as acquired.
4493 auto swapchain_data = GetSwapchainState(swapchain);
4494 if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
John Zulauffaa7a522021-03-05 12:22:45 -07004495 IMAGE_STATE *image_state = swapchain_data->images[*pImageIndex].image_state;
locke-lunargd556cc32019-09-17 01:21:23 -06004496 if (image_state) {
4497 image_state->acquired = true;
4498 image_state->shared_presentable = swapchain_data->shared_presentable;
4499 }
4500 }
4501}
4502
4503void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4504 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
4505 VkResult result) {
4506 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4507 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
4508}
4509
4510void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
4511 uint32_t *pImageIndex, VkResult result) {
4512 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4513 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
4514 pAcquireInfo->fence, pImageIndex);
4515}
4516
4517void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
4518 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
4519 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
4520 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
4521 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
4522 phys_device_state.phys_device = pPhysicalDevices[i];
4523 // Init actual features for each physical device
4524 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
4525 }
4526 }
4527}
4528
4529// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
4530static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004531 VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004532 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
4533
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004534 if (pQueueFamilyProperties) { // Save queue family properties
locke-lunargd556cc32019-09-17 01:21:23 -06004535 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
4536 for (uint32_t i = 0; i < count; ++i) {
4537 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
4538 }
4539 }
4540}
4541
4542void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
4543 uint32_t *pQueueFamilyPropertyCount,
4544 VkQueueFamilyProperties *pQueueFamilyProperties) {
4545 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4546 assert(physical_device_state);
Mike Schuchardt2df08912020-12-15 16:28:09 -08004547 VkQueueFamilyProperties2 *pqfp = nullptr;
4548 std::vector<VkQueueFamilyProperties2> qfp;
locke-lunargd556cc32019-09-17 01:21:23 -06004549 qfp.resize(*pQueueFamilyPropertyCount);
4550 if (pQueueFamilyProperties) {
4551 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06004552 qfp[i] = LvlInitStruct<VkQueueFamilyProperties2>();
locke-lunargd556cc32019-09-17 01:21:23 -06004553 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
4554 }
4555 pqfp = qfp.data();
4556 }
4557 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
4558}
4559
4560void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004561 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004562 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4563 assert(physical_device_state);
4564 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4565 pQueueFamilyProperties);
4566}
4567
4568void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004569 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004570 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4571 assert(physical_device_state);
4572 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4573 pQueueFamilyProperties);
4574}

4575void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
4576 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004577 if (!surface) return;
4578 auto surface_state = GetSurfaceState(surface);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004579 surface_state->Destroy();
locke-lunargd556cc32019-09-17 01:21:23 -06004580 surface_map.erase(surface);
4581}
4582
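// Common recording helper shared by the platform-specific vkCreate*Surface* entry points below; each caller
// invokes it only on VK_SUCCESS, so a SURFACE_STATE is created solely for surfaces the driver actually returned.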
4583void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004584 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06004585}
4586
4587void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
4588 const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
4589 const VkAllocationCallbacks *pAllocator,
4590 VkSurfaceKHR *pSurface, VkResult result) {
4591 if (VK_SUCCESS != result) return;
4592 RecordVulkanSurface(pSurface);
4593}
4594
4595#ifdef VK_USE_PLATFORM_ANDROID_KHR
4596void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
4597 const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
4598 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4599 VkResult result) {
4600 if (VK_SUCCESS != result) return;
4601 RecordVulkanSurface(pSurface);
4602}
4603#endif // VK_USE_PLATFORM_ANDROID_KHR
4604
4605#ifdef VK_USE_PLATFORM_IOS_MVK
4606void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
4607 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4608 VkResult result) {
4609 if (VK_SUCCESS != result) return;
4610 RecordVulkanSurface(pSurface);
4611}
4612#endif // VK_USE_PLATFORM_IOS_MVK
4613
4614#ifdef VK_USE_PLATFORM_MACOS_MVK
4615void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
4616 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
4617 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4618 VkResult result) {
4619 if (VK_SUCCESS != result) return;
4620 RecordVulkanSurface(pSurface);
4621}
4622#endif // VK_USE_PLATFORM_MACOS_MVK
4623
Jeremy Kniagerf33a67c2019-12-09 09:44:39 -07004624#ifdef VK_USE_PLATFORM_METAL_EXT
4625void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
4626 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
4627 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4628 VkResult result) {
4629 if (VK_SUCCESS != result) return;
4630 RecordVulkanSurface(pSurface);
4631}
4632#endif // VK_USE_PLATFORM_METAL_EXT
4633
locke-lunargd556cc32019-09-17 01:21:23 -06004634#ifdef VK_USE_PLATFORM_WAYLAND_KHR
4635void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
4636 const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
4637 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4638 VkResult result) {
4639 if (VK_SUCCESS != result) return;
4640 RecordVulkanSurface(pSurface);
4641}
4642#endif // VK_USE_PLATFORM_WAYLAND_KHR
4643
4644#ifdef VK_USE_PLATFORM_WIN32_KHR
4645void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
4646 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
4647 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4648 VkResult result) {
4649 if (VK_SUCCESS != result) return;
4650 RecordVulkanSurface(pSurface);
4651}
4652#endif // VK_USE_PLATFORM_WIN32_KHR
4653
4654#ifdef VK_USE_PLATFORM_XCB_KHR
4655void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
4656 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4657 VkResult result) {
4658 if (VK_SUCCESS != result) return;
4659 RecordVulkanSurface(pSurface);
4660}
4661#endif // VK_USE_PLATFORM_XCB_KHR
4662
4663#ifdef VK_USE_PLATFORM_XLIB_KHR
4664void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
4665 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4666 VkResult result) {
4667 if (VK_SUCCESS != result) return;
4668 RecordVulkanSurface(pSurface);
4669}
4670#endif // VK_USE_PLATFORM_XLIB_KHR
4671
Niklas Haas8b84af12020-04-19 22:20:11 +02004672void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
4673 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
4674 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4675 VkResult result) {
4676 if (VK_SUCCESS != result) return;
4677 RecordVulkanSurface(pSurface);
4678}
4679
Cort23cf2282019-09-20 18:58:18 +02004680void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004681 VkPhysicalDeviceFeatures *pFeatures) {
4682 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Yilong Li358152a2020-07-08 02:16:45 -07004683 // Reset the features2 safe struct before setting up the features field.
4684 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02004685 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02004686}
4687
4688void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004689 VkPhysicalDeviceFeatures2 *pFeatures) {
4690 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02004691 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02004692}
4693
4694void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02004695 VkPhysicalDeviceFeatures2 *pFeatures) {
4696 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02004697 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02004698}
4699
locke-lunargd556cc32019-09-17 01:21:23 -06004700void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
4701 VkSurfaceKHR surface,
4702 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
4703 VkResult result) {
4704 if (VK_SUCCESS != result) return;
4705 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004706 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004707
4708 // TODO: May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4709 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004710}
4711
4712void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
4713 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4714 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
4715 if (VK_SUCCESS != result) return;
4716 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004717 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004718
4719 // TODO: May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4720 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004721}
4722
4723void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
4724 VkSurfaceKHR surface,
4725 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
4726 VkResult result) {
    if (VK_SUCCESS != result) return;
4727 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
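    // VkSurfaceCapabilities2EXT is a distinct sType/pNext structure (it additionally reports
    // supportedSurfaceCounters), so the fields it shares with VkSurfaceCapabilitiesKHR are copied individually
    // into the cached surfaceCapabilities.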
locke-lunargd556cc32019-09-17 01:21:23 -06004728 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
4729 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
4730 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
4731 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
4732 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
4733 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
4734 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
4735 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
4736 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
4737 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004738
4739 // TODO: May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
4740 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004741}
4742
4743void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
4744 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
4745 VkBool32 *pSupported, VkResult result) {
4746 if (VK_SUCCESS != result) return;
4747 auto surface_state = GetSurfaceState(surface);
4748 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
4749}
4750
4751void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
4752 VkSurfaceKHR surface,
4753 uint32_t *pPresentModeCount,
4754 VkPresentModeKHR *pPresentModes,
4755 VkResult result) {
4756 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4757
4758 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
4759 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004760
4761 if (*pPresentModeCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004762 if (*pPresentModeCount > physical_device_state->present_modes.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004763 physical_device_state->present_modes.resize(*pPresentModeCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004764 }
locke-lunargd556cc32019-09-17 01:21:23 -06004765 }
4766 if (pPresentModes) {
locke-lunargd556cc32019-09-17 01:21:23 -06004767 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
4768 physical_device_state->present_modes[i] = pPresentModes[i];
4769 }
4770 }
4771}
4772
4773void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
4774 uint32_t *pSurfaceFormatCount,
4775 VkSurfaceFormatKHR *pSurfaceFormats,
4776 VkResult result) {
4777 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4778
4779 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004780
4781 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004782 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004783 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004784 }
locke-lunargd556cc32019-09-17 01:21:23 -06004785 }
4786 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004787 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
4788 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
4789 }
4790 }
4791}
4792
4793void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
4794 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
4795 uint32_t *pSurfaceFormatCount,
4796 VkSurfaceFormat2KHR *pSurfaceFormats,
4797 VkResult result) {
4798 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4799
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004800 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06004801 if (*pSurfaceFormatCount) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004802 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size()) {
4803 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
4804 }
locke-lunargd556cc32019-09-17 01:21:23 -06004805 }
4806 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06004807 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004808 physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
locke-lunargd556cc32019-09-17 01:21:23 -06004809 }
4810 }
4811}
4812
4813void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4814 const VkDebugUtilsLabelEXT *pLabelInfo) {
4815 BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4816}
4817
4818void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
4819 EndCmdDebugUtilsLabel(report_data, commandBuffer);
4820}
4821
4822void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4823 const VkDebugUtilsLabelEXT *pLabelInfo) {
4824 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4825
4826 // Squirrel away an easily accessible copy.
4827 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4828 cb_state->debug_label = LoggingLabel(pLabelInfo);
4829}
4830
4831void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004832 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
locke-lunargd556cc32019-09-17 01:21:23 -06004833 if (NULL != pPhysicalDeviceGroupProperties) {
4834 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
4835 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
4836 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
4837 auto &phys_device_state = physical_device_map[cur_phys_dev];
4838 phys_device_state.phys_device = cur_phys_dev;
4839 // Init actual features for each physical device
4840 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
4841 }
4842 }
4843 }
4844}
4845
4846void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004847 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004848 VkResult result) {
4849 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4850 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4851}
4852
4853void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004854 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties,
locke-lunargd556cc32019-09-17 01:21:23 -06004855 VkResult result) {
4856 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4857 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
4858}
4859
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004860void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
4861 uint32_t queueFamilyIndex,
4862 uint32_t *pCounterCount,
4863 VkPerformanceCounterKHR *pCounters) {
4864 if (NULL == pCounters) return;
4865
4866 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4867 assert(physical_device_state);
4868
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004869 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queue_family_counters(new QUEUE_FAMILY_PERF_COUNTERS());
4870 queue_family_counters->counters.resize(*pCounterCount);
4871 for (uint32_t i = 0; i < *pCounterCount; i++) queue_family_counters->counters[i] = pCounters[i];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004872
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07004873 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queue_family_counters);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004874}
4875
4876void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
4877 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
4878 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
4879 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
4880 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
4881}
4882
4883void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
4884 VkResult result) {
4885 if (result == VK_SUCCESS) performance_lock_acquired = true;
4886}
4887
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004888void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
4889 performance_lock_acquired = false;
4890 for (auto &cmd_buffer : commandBufferMap) {
4891 cmd_buffer.second->performance_lock_released = true;
4892 }
4893}
4894
locke-lunargd556cc32019-09-17 01:21:23 -06004895void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004896 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004897 const VkAllocationCallbacks *pAllocator) {
4898 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004899 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4900 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004901 desc_template_map.erase(descriptorUpdateTemplate);
4902}
4903
4904void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004905 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004906 const VkAllocationCallbacks *pAllocator) {
4907 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004908 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4909 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004910 desc_template_map.erase(descriptorUpdateTemplate);
4911}
4912
Mike Schuchardt2df08912020-12-15 16:28:09 -08004913void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4914 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) {
locke-lunargd556cc32019-09-17 01:21:23 -06004915 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004916 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06004917 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
4918}
4919
Mike Schuchardt2df08912020-12-15 16:28:09 -08004920void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device,
4921 const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
4922 const VkAllocationCallbacks *pAllocator,
4923 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate,
4924 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004925 if (VK_SUCCESS != result) return;
4926 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4927}
4928
4929void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
Mike Schuchardt2df08912020-12-15 16:28:09 -08004930 VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
4931 VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate, VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004932 if (VK_SUCCESS != result) return;
4933 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
4934}
4935
4936void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004937 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004938 const void *pData) {
4939 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
4940 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
4941 assert(0);
4942 } else {
4943 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
4944 // TODO: Record template push descriptor updates
4945 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
4946 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
4947 }
4948 }
4949}
4950
4951void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
4952 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4953 const void *pData) {
4954 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
4955}
4956
4957void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
Mike Schuchardt2df08912020-12-15 16:28:09 -08004958 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
locke-lunargd556cc32019-09-17 01:21:23 -06004959 const void *pData) {
4960 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
4961}
4962
Mike Schuchardt2df08912020-12-15 16:28:09 -08004963void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
4964 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
4965 VkPipelineLayout layout, uint32_t set,
4966 const void *pData) {
locke-lunargd556cc32019-09-17 01:21:23 -06004967 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4968
4969 const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
4970 if (template_state) {
4971 auto layout_data = GetPipelineLayout(layout);
4972 auto dsl = GetDslFromPipelineLayout(layout_data, set);
4973 const auto &template_ci = template_state->create_info;
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06004974 if (dsl && !dsl->Destroyed()) {
locke-lunargd556cc32019-09-17 01:21:23 -06004975 // Decode the template into a set of write updates
4976 cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
4977 dsl->GetDescriptorSetLayout());
4978 RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
4979 static_cast<uint32_t>(decoded_template.desc_writes.size()),
4980 decoded_template.desc_writes.data());
4981 }
4982 }
4983}
4984
4985void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
4986 uint32_t *pPropertyCount, void *pProperties) {
4987 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4988 if (*pPropertyCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06004989 physical_device_state->display_plane_property_count = *pPropertyCount;
4990 }
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004991 if (*pPropertyCount || pProperties) {
4992 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004993 }
4994}
4995
4996void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
4997 uint32_t *pPropertyCount,
4998 VkDisplayPlanePropertiesKHR *pProperties,
4999 VkResult result) {
5000 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5001 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5002}
5003
5004void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
5005 uint32_t *pPropertyCount,
5006 VkDisplayPlaneProperties2KHR *pProperties,
5007 VkResult result) {
5008 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5009 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5010}
5011
5012void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5013 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
5014 QueryObject query_obj = {queryPool, query, index};
5015 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5016 RecordCmdBeginQuery(cb_state, query_obj);
5017}
5018
5019void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5020 uint32_t query, uint32_t index) {
5021 QueryObject query_obj = {queryPool, query, index};
5022 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5023 RecordCmdEndQuery(cb_state, query_obj);
5024}
5025
5026void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
5027 VkSamplerYcbcrConversion ycbcr_conversion) {
Jeremy Gebben5d970742021-05-31 16:04:14 -06005028 auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>(ycbcr_conversion, create_info,
5029 GetPotentialFormatFeatures(create_info->format));
5030 // If format is VK_FORMAT_UNDEFINED, format_features will be set by external AHB features
locke-lunargd556cc32019-09-17 01:21:23 -06005031 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005032 RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005033 }
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005034 samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005035}
5036
5037void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
5038 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5039 const VkAllocationCallbacks *pAllocator,
5040 VkSamplerYcbcrConversion *pYcbcrConversion,
5041 VkResult result) {
5042 if (VK_SUCCESS != result) return;
5043 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5044}
5045
5046void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
5047 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5048 const VkAllocationCallbacks *pAllocator,
5049 VkSamplerYcbcrConversion *pYcbcrConversion,
5050 VkResult result) {
5051 if (VK_SUCCESS != result) return;
5052 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5053}
5054
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005055void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
5056 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
5057 RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
5058 }
5059
5060 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005061 ycbcr_state->Destroy();
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005062 samplerYcbcrConversionMap.erase(ycbcr_conversion);
5063}
5064
locke-lunargd556cc32019-09-17 01:21:23 -06005065void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
5066 const VkAllocationCallbacks *pAllocator) {
5067 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005068 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005069}
5070
5071void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
5072 VkSamplerYcbcrConversion ycbcrConversion,
5073 const VkAllocationCallbacks *pAllocator) {
5074 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005075 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005076}
5077
Tony-LunarG977448c2019-12-02 14:52:02 -07005078void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5079 uint32_t queryCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005080 // Do nothing if the feature is not enabled.
Piers Daniell41b8c5d2020-01-10 15:42:00 -07005081 if (!enabled_features.core12.hostQueryReset) return;
locke-lunargd556cc32019-09-17 01:21:23 -06005082
5083 // Do nothing if the query pool has been destroyed.
5084 auto query_pool_state = GetQueryPoolState(queryPool);
5085 if (!query_pool_state) return;
5086
5087 // Reset the state of existing entries.
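    // Non-performance queries keep a single state entry per query; performance queries additionally keep one
    // entry per counter pass, so each pass index is reset below as well.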
5088 QueryObject query_obj{queryPool, 0};
5089 const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
5090 for (uint32_t i = 0; i < max_query_count; ++i) {
5091 query_obj.query = firstQuery + i;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02005092 queryToStateMap[query_obj] = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005093 if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005094 for (uint32_t pass_index = 0; pass_index < query_pool_state->n_performance_passes; pass_index++) {
5095 query_obj.perf_pass = pass_index;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02005096 queryToStateMap[query_obj] = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005097 }
5098 }
locke-lunargd556cc32019-09-17 01:21:23 -06005099 }
5100}
5101
Tony-LunarG977448c2019-12-02 14:52:02 -07005102void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5103 uint32_t queryCount) {
5104 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
5105}
5106
5107void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5108 uint32_t queryCount) {
5109 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
5110}
5111
locke-lunargd556cc32019-09-17 01:21:23 -06005112void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
5113 const TEMPLATE_STATE *template_state, const void *pData) {
5114 // Translate the templated update into a normal update for validation...
5115 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
5116 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
5117 decoded_update.desc_writes.data(), 0, NULL);
5118}
5119
5120// Update the common AllocateDescriptorSetsData
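// Resolves each requested set layout and totals the descriptors required per descriptor type; the result is
// consumed by PerformAllocateDescriptorSets below to decrement the pool's available set and descriptor counts.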
5121void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005122 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005123 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005124 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005125 if (layout) {
5126 ds_data->layout_nodes[i] = layout;
5127 // Count total descriptors required per type
5128 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5129 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005130 uint32_t type_index = static_cast<uint32_t>(binding_layout->descriptorType);
5131 ds_data->required_descriptors_by_type[type_index] += binding_layout->descriptorCount;
locke-lunargd556cc32019-09-17 01:21:23 -06005132 }
5133 }
5134 // Any unknown layouts will be flagged as errors during the ValidateAllocateDescriptorSets() call
5135 }
5136}
5137
5138// Decrement allocated sets from the pool and insert new sets into set_map
5139void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
5140 const VkDescriptorSet *descriptor_sets,
5141 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
5142 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
5143 // Account for sets and individual descriptors allocated from pool
5144 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
5145 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
5146 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
5147 }
5148
Mark Lobodzinski1f887d32020-12-30 15:31:33 -07005149 const auto *variable_count_info = LvlFindInChain<VkDescriptorSetVariableDescriptorCountAllocateInfo>(p_alloc_info->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06005150 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
5151
5152 // Create tracking object for each descriptor set; insert into global map and the pool's set.
5153 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
5154 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
5155
Jeff Bolz41a1ced2019-10-11 11:40:49 -05005156 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07005157 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06005158 pool_state->sets.insert(new_ds.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005159 setMap[descriptor_sets[i]] = std::move(new_ds);
5160 }
5161}
5162
5163// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
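// Called from the record hooks for vkCmdDispatch*, and (via UpdateStateCmdDrawType) from the vkCmdDraw*,
// indirect and mesh-shading variants below.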
Jeremy Kniager05631e72020-06-08 14:21:35 -06005164void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
locke-lunarg540b2252020-08-03 13:23:36 -06005165 VkPipelineBindPoint bind_point, const char *function) {
5166 UpdateDrawState(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005167 cb_state->hasDispatchCmd = true;
5168}
5169
locke-lunargd556cc32019-09-17 01:21:23 -06005170// Generic function to handle state update for all CmdDraw* type functions
locke-lunarg540b2252020-08-03 13:23:36 -06005171void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
5172 const char *function) {
5173 UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005174 cb_state->hasDrawCmd = true;
David Zhao Akeley44139b12021-04-26 16:16:13 -07005175
5176 // Update the consumed viewport/scissor count.
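    // A draw consumes at least as many viewports/scissors as the bound pipeline declares statically; whether
    // the with-count dynamic versions were used is latched separately below.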
5177 uint32_t &used = cb_state->usedViewportScissorCount;
5178 used = std::max(used, cb_state->pipelineStaticViewportCount);
5179 used = std::max(used, cb_state->pipelineStaticScissorCount);
5180 cb_state->usedDynamicViewportCount |= !!(cb_state->dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET); // !! silences MSVC warn
5181 cb_state->usedDynamicScissorCount |= !!(cb_state->dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET);
locke-lunargd556cc32019-09-17 01:21:23 -06005182}
5183
5184void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5185 uint32_t firstVertex, uint32_t firstInstance) {
5186 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005187 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
locke-lunargd556cc32019-09-17 01:21:23 -06005188}
5189
5190void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5191 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5192 uint32_t firstInstance) {
5193 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005194 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
locke-lunargd556cc32019-09-17 01:21:23 -06005195}
5196
5197void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5198 uint32_t count, uint32_t stride) {
5199 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5200 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005201 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005202 if (!disabled[command_buffer_state]) {
5203 cb_state->AddChild(buffer_state);
5204 }
locke-lunargd556cc32019-09-17 01:21:23 -06005205}
5206
5207void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5208 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5209 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5210 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005211 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005212 if (!disabled[command_buffer_state]) {
5213 cb_state->AddChild(buffer_state);
5214 }
locke-lunargd556cc32019-09-17 01:21:23 -06005215}
5216
5217void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5218 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005219 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
locke-lunargd556cc32019-09-17 01:21:23 -06005220}
5221
5222void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5223 VkDeviceSize offset) {
5224 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005225 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005226 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005227 if (!disabled[command_buffer_state]) {
5228 cb_state->AddChild(buffer_state);
5229 }
locke-lunargd556cc32019-09-17 01:21:23 -06005230}
5231
Tony-LunarG977448c2019-12-02 14:52:02 -07005232void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5233 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
locke-lunarg540b2252020-08-03 13:23:36 -06005234 uint32_t stride, const char *function) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005235 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5236 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5237 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005238 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005239 if (!disabled[command_buffer_state]) {
5240 cb_state->AddChild(buffer_state);
5241 cb_state->AddChild(count_buffer_state);
5242 }
Tony-LunarG977448c2019-12-02 14:52:02 -07005243}
5244
locke-lunargd556cc32019-09-17 01:21:23 -06005245void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5246 VkDeviceSize offset, VkBuffer countBuffer,
5247 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5248 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005249 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5250 "vkCmdDrawIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005251}
5252
5253void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5254 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5255 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005256 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5257 "vkCmdDrawIndirectCount()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005258}
5259
5260void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5261 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
locke-lunarg540b2252020-08-03 13:23:36 -06005262 uint32_t maxDrawCount, uint32_t stride, const char *function) {
locke-lunargd556cc32019-09-17 01:21:23 -06005263 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5264 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5265 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005266 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005267 if (!disabled[command_buffer_state]) {
5268 cb_state->AddChild(buffer_state);
5269 cb_state->AddChild(count_buffer_state);
5270 }
locke-lunargd556cc32019-09-17 01:21:23 -06005271}
5272
5273void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5274 VkDeviceSize offset, VkBuffer countBuffer,
5275 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5276 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005277 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5278 "vkCmdDrawIndexedIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005279}
5280
5281void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
5282 VkDeviceSize offset, VkBuffer countBuffer,
5283 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5284 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005285 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5286 "vkCmdDrawIndexedIndirectCount()");
locke-lunargd556cc32019-09-17 01:21:23 -06005287}
5288
5289void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5290 uint32_t firstTask) {
5291 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005292 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005293}
5294
5295void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5296 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5297 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005298 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5299 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005300 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005301 if (!disabled[command_buffer_state] && buffer_state) {
5302 cb_state->AddChild(buffer_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005303 }
5304}
5305
5306void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5307 VkDeviceSize offset, VkBuffer countBuffer,
5308 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5309 uint32_t stride) {
5310 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5311 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5312 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005313 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5314 "vkCmdDrawMeshTasksIndirectCountNV()");
Jeremy Gebben9efe1cf2021-05-15 20:05:09 -06005315 if (!disabled[command_buffer_state]) {
5316 if (buffer_state) {
5317 cb_state->AddChild(buffer_state);
5318 }
5319 if (count_buffer_state) {
5320 cb_state->AddChild(count_buffer_state);
5321 }
locke-lunargd556cc32019-09-17 01:21:23 -06005322 }
5323}
5324
5325void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
5326 const VkAllocationCallbacks *pAllocator,
5327 VkShaderModule *pShaderModule, VkResult result,
5328 void *csm_state_data) {
5329 if (VK_SUCCESS != result) return;
5330 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
5331
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06005332 spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
locke-lunargd556cc32019-09-17 01:21:23 -06005333 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005334 auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
5335 csm_state->unique_shader_id)
5336 : std::make_shared<SHADER_MODULE_STATE>();
sfricke-samsung962cad92021-04-13 00:46:29 -07005337 new_shader_module->SetPushConstantUsedInShader();
locke-lunargd556cc32019-09-17 01:21:23 -06005338 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
5339}
5340
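// Captures per-stage shader information (entry point, accessible ids, descriptor usage) for later pipeline
// validation, and folds each stage's descriptor requirements and sampler/image pairings into the pipeline's
// active_slots map.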
5341void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
Jeremy Gebben159b3cc2021-06-03 09:09:03 -06005342 PipelineStageState *stage_state) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005343 // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
locke-lunargde3f0fa2020-09-10 11:55:31 -06005344 stage_state->entry_point_name = pStage->pName;
5345 stage_state->shader_state = GetShared<SHADER_MODULE_STATE>(pStage->module);
5346 auto module = stage_state->shader_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06005347 if (!module->has_valid_spirv) return;
5348
5349 // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
sfricke-samsung962cad92021-04-13 00:46:29 -07005350 auto entrypoint = module->FindEntrypoint(pStage->pName, pStage->stage);
locke-lunargd556cc32019-09-17 01:21:23 -06005351 if (entrypoint == module->end()) return;
5352
locke-lunarg654e3692020-06-04 17:19:15 -06005353 stage_state->stage_flag = pStage->stage;
5354
locke-lunargd556cc32019-09-17 01:21:23 -06005355 // Mark accessible ids
sfricke-samsung962cad92021-04-13 00:46:29 -07005356 stage_state->accessible_ids = module->MarkAccessibleIds(entrypoint);
5357 module->ProcessExecutionModes(entrypoint, pipeline);
locke-lunargd556cc32019-09-17 01:21:23 -06005358
sfricke-samsung962cad92021-04-13 00:46:29 -07005359 stage_state->descriptor_uses = module->CollectInterfaceByDescriptorSlot(
5360 stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
locke-lunargd556cc32019-09-17 01:21:23 -06005361 // Capture descriptor uses for the pipeline
locke-lunarg36045992020-08-20 16:54:37 -06005362 for (const auto &use : stage_state->descriptor_uses) {
locke-lunargd556cc32019-09-17 01:21:23 -06005363 // While validating shaders capture which slots are used by the pipeline
John Zulauf649edd52019-10-02 14:39:41 -06005364 const uint32_t slot = use.first.first;
locke-lunarg351c9d82020-10-23 14:43:21 -06005365 pipeline->active_slots[slot][use.first.second].is_writable |= use.second.is_writable;
locke-lunarg36045992020-08-20 16:54:37 -06005366 auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
sfricke-samsung962cad92021-04-13 00:46:29 -07005367 reqs = descriptor_req(reqs | module->DescriptorTypeToReqs(use.second.type_id));
locke-lunarg25b6c352020-08-06 17:44:18 -06005368 if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
locke-lunarg12d20992020-09-21 12:46:49 -06005369 if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
locke-lunargae2a43c2020-09-22 17:21:57 -06005370 if (use.second.is_sampler_bias_offset) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET);
locke-lunarg12d20992020-09-21 12:46:49 -06005371
John Zulauf649edd52019-10-02 14:39:41 -06005372 pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
locke-lunarg36045992020-08-20 16:54:37 -06005373 if (use.second.samplers_used_by_image.size()) {
locke-lunarg654a9052020-10-13 16:28:42 -06005374 auto &samplers_used_by_image = pipeline->active_slots[slot][use.first.second].samplers_used_by_image;
5375 if (use.second.samplers_used_by_image.size() > samplers_used_by_image.size()) {
5376 samplers_used_by_image.resize(use.second.samplers_used_by_image.size());
5377 }
locke-lunarg654a9052020-10-13 16:28:42 -06005378 uint32_t image_index = 0;
5379 for (const auto &samplers : use.second.samplers_used_by_image) {
5380 for (const auto &sampler : samplers) {
locke-lunargb8be8222020-10-20 00:34:37 -06005381 samplers_used_by_image[image_index].emplace(sampler, nullptr);
locke-lunarg654a9052020-10-13 16:28:42 -06005382 }
5383 ++image_index;
5384 }
locke-lunarg36045992020-08-20 16:54:37 -06005385 }
locke-lunargd556cc32019-09-17 01:21:23 -06005386 }
locke-lunarg78486832020-09-09 19:39:42 -06005387
locke-lunarg96dc9632020-06-10 17:22:18 -06005388 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
sfricke-samsung962cad92021-04-13 00:46:29 -07005389 pipeline->fragmentShader_writable_output_location_list = module->CollectWritableOutputLocationinFS(*pStage);
locke-lunarg96dc9632020-06-10 17:22:18 -06005390 }
locke-lunargd556cc32019-09-17 01:21:23 -06005391}
5392
sfricke-samsung70ad9ce2021-04-04 00:53:54 -07005393// Discussed in detail in https://github.com/KhronosGroup/Vulkan-Docs/issues/1081
5394// Internal discussion and CTS tests were written to prove that this does not need to be called after an incompatible vkCmdBindPipeline:
5395// "Binding a pipeline with a layout that is not compatible with the push constant layout does not disturb the push constant values"
5396//
5397// vkCmdBindDescriptorSets has nothing to do with push constants, so there is no need to call this after it either.
5398//
5399// Part of this assumes apps will have everything properly compatible at draw/dispatch/traceRays/etc time, or else other VUs will be triggered
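//
// Illustrative command-buffer sequence for the cases above (a sketch, not code from this layer):
//   vkCmdPushConstants(cb, layout_a, ...);        // bytes below are tracked against layout_a's ranges
//   vkCmdBindPipeline(cb, ..., pipeline_with_b);  // even if layout_b is incompatible, values are NOT disturbed
//   vkCmdPushConstants(cb, layout_b, ...);        // tracked ranges differ, so the state below is reset first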
locke-lunargd556cc32019-09-17 01:21:23 -06005400void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
5401 if (cb_state == nullptr) {
5402 return;
5403 }
5404
5405 const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
5406 if (pipeline_layout_state == nullptr) {
5407 return;
5408 }
5409
5410 if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
5411 cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
5412 cb_state->push_constant_data.clear();
locke-lunargde3f0fa2020-09-10 11:55:31 -06005413 cb_state->push_constant_data_update.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06005414 uint32_t size_needed = 0;
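        // For every stage bit in each push constant range, maintain a per-stage byte vector: bytes before the
        // range's offset are marked PC_Byte_Not_Set, and bytes inside the range start as PC_Byte_Not_Updated
        // until they are actually written.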
John Zulauf79f06582021-02-27 18:38:39 -07005415 for (const auto &push_constant_range : *cb_state->push_constant_data_ranges) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06005416 auto size = push_constant_range.offset + push_constant_range.size;
5417 size_needed = std::max(size_needed, size);
5418
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005419 auto stage_flags = push_constant_range.stageFlags;
locke-lunargde3f0fa2020-09-10 11:55:31 -06005420 uint32_t bit_shift = 0;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005421 while (stage_flags) {
5422 if (stage_flags & 1) {
locke-lunargde3f0fa2020-09-10 11:55:31 -06005423 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
5424 const auto it = cb_state->push_constant_data_update.find(flag);
5425
5426 if (it != cb_state->push_constant_data_update.end()) {
5427 if (it->second.size() < push_constant_range.offset) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005428 it->second.resize(push_constant_range.offset, PC_Byte_Not_Set);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005429 }
5430 if (it->second.size() < size) {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005431 it->second.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005432 }
5433 } else {
locke-lunarg3d8b8f32020-10-26 17:04:16 -06005434 std::vector<uint8_t> bytes;
5435 bytes.resize(push_constant_range.offset, PC_Byte_Not_Set);
5436 bytes.resize(size, PC_Byte_Not_Updated);
locke-lunargde3f0fa2020-09-10 11:55:31 -06005437 cb_state->push_constant_data_update[flag] = bytes;
5438 }
5439 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005440 stage_flags = stage_flags >> 1;
locke-lunargde3f0fa2020-09-10 11:55:31 -06005441 ++bit_shift;
5442 }
locke-lunargd556cc32019-09-17 01:21:23 -06005443 }
5444 cb_state->push_constant_data.resize(size_needed, 0);
5445 }
5446}
John Zulauf22b0fbe2019-10-15 06:26:16 -06005447
5448void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
5449 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
5450 VkResult result) {
5451 if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
5452 auto swapchain_state = GetSwapchainState(swapchain);
5453
5454 if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);
5455
5456 if (pSwapchainImages) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005457 for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
John Zulauf29d00532021-03-04 13:28:54 -07005458 SWAPCHAIN_IMAGE &swapchain_image = swapchain_state->images[i];
John Zulauffaa7a522021-03-05 12:22:45 -07005459 if (swapchain_image.image_state) continue; // Already retrieved this.
John Zulauf22b0fbe2019-10-15 06:26:16 -06005460
5461 // Add imageMap entries for each swapchain image
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -06005462 auto image_ci = LvlInitStruct<VkImageCreateInfo>();
5463 image_ci.pNext = LvlFindInChain<VkImageFormatListCreateInfo>(swapchain_state->createInfo.pNext);
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -06005464 image_ci.flags = 0; // to be updated below
John Zulauf22b0fbe2019-10-15 06:26:16 -06005465 image_ci.imageType = VK_IMAGE_TYPE_2D;
5466 image_ci.format = swapchain_state->createInfo.imageFormat;
5467 image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
5468 image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
5469 image_ci.extent.depth = 1;
5470 image_ci.mipLevels = 1;
5471 image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
5472 image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
5473 image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
5474 image_ci.usage = swapchain_state->createInfo.imageUsage;
5475 image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
5476 image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
5477 image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
5478 image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
5479
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005480 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005481 image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005482 }
5483 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005484 image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005485 }
5486 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
Mike Schuchardt2df08912020-12-15 16:28:09 -08005487 image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07005488 }
John Zulauf22b0fbe2019-10-15 06:26:16 -06005489
locke-lunarg296a3c92020-03-25 01:04:29 -06005490 imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
John Zulauffaa7a522021-03-05 12:22:45 -07005491 auto *image_state = imageMap[pSwapchainImages[i]].get();
5492 assert(image_state);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005493 image_state->valid = false;
5494 image_state->create_from_swapchain = swapchain;
5495 image_state->bind_swapchain = swapchain;
5496 image_state->bind_swapchain_imageIndex = i;
Tony-LunarGe64e4fe2020-02-17 16:21:55 -07005497 image_state->is_swapchain_image = true;
sfricke-samsungc8a50852021-03-31 13:56:50 -07005498 image_state->unprotected = ((image_ci.flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);
John Zulauf29d00532021-03-04 13:28:54 -07005499
5500 // Since swapchains can't be linear, we can create an encoder here, and SyncValNeeds a fake_base_address
5501 image_state->fragment_encoder = std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(
5502 new subresource_adapter::ImageRangeEncoder(*image_state));
5503
5504 if (swapchain_image.bound_images.empty()) {
5505 // First time "bind" allocates
5506 image_state->swapchain_fake_address = fake_memory.Alloc(image_state->fragment_encoder->TotalSize());
5507 } else {
5508 // All others reuse
5509 image_state->swapchain_fake_address = (*swapchain_image.bound_images.cbegin())->swapchain_fake_address;
5510 // Since there are others, need to update the aliasing information
Jeremy Gebben5570abe2021-05-16 18:35:13 -06005511 image_state->AddAliasingImage(swapchain_image.bound_images);
John Zulauf29d00532021-03-04 13:28:54 -07005512 }
5513
5514 swapchain_image.image_state = image_state; // Don't move, it's already a reference to the imageMap
John Zulauffaa7a522021-03-05 12:22:45 -07005515 swapchain_image.bound_images.emplace(image_state);
Petr Kraus44f1c482020-04-25 20:09:25 +02005516
5517 AddImageStateProps(*image_state, device, physical_device);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005518 }
5519 }
5520
5521 if (*pSwapchainImageCount) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005522 swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
5523 }
5524}
sourav parmar35e7a002020-06-09 17:58:44 -07005525
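// For reference, the application-side call sequence this hook observes is the standard
// two-call idiom (a minimal sketch; error handling omitted):
//
//     uint32_t count = 0;
//     vkGetSwapchainImagesKHR(device, swapchain, &count, nullptr);        // query the count only
//     std::vector<VkImage> images(count);
//     vkGetSwapchainImagesKHR(device, swapchain, &count, images.data());  // retrieve the handles
//
// The recorder above covers both forms: a null pSwapchainImages only updates the stored image
// count, while the second call creates an IMAGE_STATE entry for each returned handle.
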
void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
                                                                           const VkCopyAccelerationStructureInfoKHR *pInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE_KHR *src_as_state = GetAccelerationStructureStateKHR(pInfo->src);
        ACCELERATION_STRUCTURE_STATE_KHR *dst_as_state = GetAccelerationStructureStateKHR(pInfo->dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info_khr = src_as_state->build_info_khr;
            if (!disabled[command_buffer_state]) {
                cb_state->AddChild(dst_as_state);
                cb_state->AddChild(src_as_state);
            }
        }
    }
}

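// For reference, a minimal application-side sketch of the copy this hook records (handles are
// placeholders; both acceleration structures must already exist and be compatible with the
// chosen mode):
//
//     auto copy_info = LvlInitStruct<VkCopyAccelerationStructureInfoKHR>();
//     copy_info.src = src_acceleration_structure;
//     copy_info.dst = dst_acceleration_structure;
//     copy_info.mode = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR;
//     vkCmdCopyAccelerationStructureKHR(command_buffer, &copy_info);
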
void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_CULL_MODE_SET;
    cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
}

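// The Set*EXT recorders in this block (cull mode above; front face, topology, viewport/scissor
// with count, and the depth/stencil toggles below) all follow the same pattern: set the
// CBSTATUS_* bit to record that the state was supplied dynamically and clear the matching
// static_status bit so draw-time checks treat the value as coming from a dynamic command
// rather than the bound pipeline. A minimal application-side sketch of the usage they track
// (handles and values are placeholders):
//
//     const VkDynamicState dynamic_states[] = {VK_DYNAMIC_STATE_CULL_MODE_EXT,
//                                              VK_DYNAMIC_STATE_FRONT_FACE_EXT};
//     auto dynamic_info = LvlInitStruct<VkPipelineDynamicStateCreateInfo>();
//     dynamic_info.dynamicStateCount = 2;
//     dynamic_info.pDynamicStates = dynamic_states;
//     // ... create the graphics pipeline with dynamic_info, then at record time:
//     vkCmdSetCullModeEXT(command_buffer, VK_CULL_MODE_BACK_BIT);
//     vkCmdSetFrontFaceEXT(command_buffer, VK_FRONT_FACE_COUNTER_CLOCKWISE);
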
void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_FRONT_FACE_SET;
    cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
                                                                     VkPrimitiveTopology primitiveTopology) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->primitiveTopology = primitiveTopology;
    cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
    cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
                                                                     const VkViewport *pViewports) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    uint32_t bits = (1u << viewportCount) - 1u;
    cb_state->viewportWithCountMask |= bits;
    cb_state->trashedViewportMask &= ~bits;
    cb_state->viewportWithCountCount = viewportCount;
    cb_state->trashedViewportCount = false;
    cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
    cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;

    cb_state->dynamicViewports.resize(std::max(size_t(viewportCount), cb_state->dynamicViewports.size()));
    for (size_t i = 0; i < viewportCount; ++i) {
        cb_state->dynamicViewports[i] = pViewports[i];
    }
}

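// Note on the recorder above: bits = (1u << viewportCount) - 1u covers viewports
// [0, viewportCount); ORing it into viewportWithCountMask and clearing it from
// trashedViewportMask lets draw-time validation tell which viewports currently hold defined
// values (the shift assumes viewportCount < 32, which reported maxViewports limits satisfy in
// practice). A minimal application-side sketch, assuming the pipeline declared
// VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
//
//     VkViewport viewport = {0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f};
//     vkCmdSetViewportWithCountEXT(command_buffer, 1, &viewport);
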
void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
                                                                    const VkRect2D *pScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    uint32_t bits = (1u << scissorCount) - 1u;
    cb_state->scissorWithCountMask |= bits;
    cb_state->trashedScissorMask &= ~bits;
    cb_state->scissorWithCountCount = scissorCount;
    cb_state->trashedScissorCount = false;
    cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
    cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
}

void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                                   uint32_t bindingCount, const VkBuffer *pBuffers,
                                                                   const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
                                                                   const VkDeviceSize *pStrides) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (pStrides) {
        cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
        cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
    }

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
        vertex_buffer_binding.offset = pOffsets[i];
        vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
        vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
        // Add a binding for this vertex buffer to this command buffer
        if (!disabled[command_buffer_state] && pBuffers[i]) {
            cb_state->AddChild(vertex_buffer_binding.buffer_state.get());
        }
    }
}

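// For reference, a minimal application-side sketch of the call this hook records. A non-null
// pStrides overrides the stride baked into the pipeline's vertex input state, and a null
// pSizes binds to the end of each buffer (VK_WHOLE_SIZE), matching the defaults above
// (handles and the Vertex struct are placeholders):
//
//     VkBuffer buffers[] = {vertex_buffer};
//     VkDeviceSize offsets[] = {0};
//     VkDeviceSize strides[] = {sizeof(Vertex)};
//     vkCmdBindVertexBuffers2EXT(command_buffer, 0, 1, buffers, offsets, nullptr, strides);
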
void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
                                                                         VkBool32 depthBoundsTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                             VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
                                                             VkCompareOp compareOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_OP_SET;
    cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
                                                                    uint32_t discardRectangleCount,
                                                                    const VkRect2D *pDiscardRectangles) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DISCARD_RECTANGLE_SET;
    cb_state->static_status &= ~CBSTATUS_DISCARD_RECTANGLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
                                                                   const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_SAMPLE_LOCATIONS_SET;
    cb_state->static_status &= ~CBSTATUS_SAMPLE_LOCATIONS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer,
                                                                    VkCoarseSampleOrderTypeNV sampleOrderType,
                                                                    uint32_t customSampleOrderCount,
                                                                    const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_COARSE_SAMPLE_ORDER_SET;
    cb_state->static_status &= ~CBSTATUS_COARSE_SAMPLE_ORDER_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer, uint32_t patchControlPoints) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_PATCH_CONTROL_POINTS_SET;
    cb_state->static_status &= ~CBSTATUS_PATCH_CONTROL_POINTS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LOGIC_OP_SET;
    cb_state->static_status &= ~CBSTATUS_LOGIC_OP_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetRasterizerDiscardEnableEXT(VkCommandBuffer commandBuffer,
                                                                           VkBool32 rasterizerDiscardEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBiasEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BIAS_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_ENABLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetPrimitiveRestartEnableEXT(VkCommandBuffer commandBuffer,
                                                                          VkBool32 primitiveRestartEnable) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
    cb_state->static_status &= ~CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
}

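// The five recorders above cover VK_EXT_extended_dynamic_state2. A minimal application-side
// sketch of the commands they track (assumes the pipeline declared the matching
// VK_DYNAMIC_STATE_*_EXT values; logic op and patch control points additionally require the
// extendedDynamicState2LogicOp / extendedDynamicState2PatchControlPoints features):
//
//     vkCmdSetRasterizerDiscardEnableEXT(command_buffer, VK_FALSE);
//     vkCmdSetDepthBiasEnableEXT(command_buffer, VK_TRUE);
//     vkCmdSetPrimitiveRestartEnableEXT(command_buffer, VK_FALSE);
//     vkCmdSetLogicOpEXT(command_buffer, VK_LOGIC_OP_COPY);
//     vkCmdSetPatchControlPointsEXT(command_buffer, 3);
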
5714void ValidationStateTracker::PreCallRecordCmdSetVertexInputEXT(
5715 VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount,
5716 const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount,
5717 const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions) {
5718 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5719 cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET;
5720 cb_state->static_status &= ~(CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET | CBSTATUS_VERTEX_INPUT_SET);
5721}
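
// For reference, a minimal application-side sketch of vkCmdSetVertexInputEXT, which replaces
// the pipeline's vertex input state when VK_DYNAMIC_STATE_VERTEX_INPUT_EXT is enabled (the
// vertexInputDynamicState feature is required; binding, location, and format values are
// placeholders):
//
//     auto binding = LvlInitStruct<VkVertexInputBindingDescription2EXT>();
//     binding.binding = 0;
//     binding.stride = sizeof(float) * 3;
//     binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
//     binding.divisor = 1;
//
//     auto attribute = LvlInitStruct<VkVertexInputAttributeDescription2EXT>();
//     attribute.location = 0;
//     attribute.binding = 0;
//     attribute.format = VK_FORMAT_R32G32B32_SFLOAT;
//     attribute.offset = 0;
//
//     vkCmdSetVertexInputEXT(command_buffer, 1, &binding, 1, &attribute);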