blob: 256b2bb78fefed4ee5725a4f7ea58955eddcae5c [file] [log] [blame]
Tony-LunarG73719992020-01-15 10:20:28 -07001/* Copyright (c) 2015-2020 The Khronos Group Inc.
2 * Copyright (c) 2015-2020 Valve Corporation
3 * Copyright (c) 2015-2020 LunarG, Inc.
4 * Copyright (C) 2015-2020 Google Inc.
locke-lunargd556cc32019-09-17 01:21:23 -06005 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * Author: Mark Lobodzinski <mark@lunarg.com>
19 * Author: Dave Houlton <daveh@lunarg.com>
20 * Shannon McPherson <shannon@lunarg.com>
21 */
22
locke-lunargd556cc32019-09-17 01:21:23 -060023#include <cmath>
24#include <set>
locke-lunargd556cc32019-09-17 01:21:23 -060025
26#include "vk_enum_string_helper.h"
27#include "vk_format_utils.h"
28#include "vk_layer_data.h"
29#include "vk_layer_utils.h"
30#include "vk_layer_logging.h"
31#include "vk_typemap_helper.h"
32
33#include "chassis.h"
34#include "state_tracker.h"
35#include "shader_validation.h"
36
John Zulauf890b50b2020-06-17 15:18:19 -060037const char *CommandTypeString(CMD_TYPE type) {
38 // Autogenerated as part of the vk_validation_error_message.h codegen
39 static const std::array<const char *, CMD_RANGE_SIZE> command_name_list = {{VUID_CMD_NAME_LIST}};
40 return command_name_list[type];
41}
42
Mark Lobodzinskib4ab6ac2020-04-02 13:12:06 -060043void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
44 if (add_obj) {
45 instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
46 // Call base class
47 ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
48 }
49}
50
John Zulauf5c5e88d2019-12-26 11:22:02 -070051uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
52 // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
53 uint32_t mip_level_count = range->levelCount;
54 if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
55 mip_level_count = mip_levels - range->baseMipLevel;
56 }
57 return mip_level_count;
58}
59
60uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
61 // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
62 uint32_t array_layer_count = range->layerCount;
63 if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
64 array_layer_count = layers - range->baseArrayLayer;
65 }
66 return array_layer_count;
67}
68
69VkImageSubresourceRange NormalizeSubresourceRange(const VkImageCreateInfo &image_create_info,
70 const VkImageSubresourceRange &range) {
71 VkImageSubresourceRange norm = range;
72 norm.levelCount = ResolveRemainingLevels(&range, image_create_info.mipLevels);
73
74 // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR flag bit, where <extent.depth> and
75 // <arrayLayers> can potentially alias.
76 uint32_t layer_limit = (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR))
77 ? image_create_info.extent.depth
78 : image_create_info.arrayLayers;
79 norm.layerCount = ResolveRemainingLayers(&range, layer_limit);
80
81 // For multiplanar formats, IMAGE_ASPECT_COLOR is equivalent to adding the aspect of the individual planes
82 VkImageAspectFlags &aspect_mask = norm.aspectMask;
83 if (FormatIsMultiplane(image_create_info.format)) {
84 if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
85 aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
86 aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
87 if (FormatPlaneCount(image_create_info.format) > 2) {
88 aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
89 }
90 }
91 }
92 return norm;
93}
94
95VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range) {
96 const VkImageCreateInfo &image_create_info = image_state.createInfo;
97 return NormalizeSubresourceRange(image_create_info, range);
98}
99
John Zulauf2bc1fde2020-04-24 15:09:51 -0600100// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
101// attachments won't persist past the API entry point exit.
102std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
103 const FRAMEBUFFER_STATE &fb_state) {
104 const VkImageView *attachments = fb_state.createInfo.pAttachments;
105 uint32_t count = fb_state.createInfo.attachmentCount;
106 if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
107 const auto *framebuffer_attachments = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
108 if (framebuffer_attachments) {
109 attachments = framebuffer_attachments->pAttachments;
110 count = framebuffer_attachments->attachmentCount;
111 }
112 }
113 return std::make_pair(count, attachments);
114}
115
116std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetAttachmentViews(const VkRenderPassBeginInfo &rp_begin,
117 const FRAMEBUFFER_STATE &fb_state) const {
118 std::vector<const IMAGE_VIEW_STATE *> views;
119
120 const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
121 const auto attachment_count = count_attachment.first;
122 const auto *attachments = count_attachment.second;
123 views.resize(attachment_count, nullptr);
124 for (uint32_t i = 0; i < attachment_count; i++) {
125 if (attachments[i] != VK_NULL_HANDLE) {
126 views[i] = Get<IMAGE_VIEW_STATE>(attachments[i]);
127 }
128 }
129 return views;
130}
131
132std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetCurrentAttachmentViews(const CMD_BUFFER_STATE &cb_state) const {
133 // Only valid *after* RecordBeginRenderPass and *before* RecordEndRenderpass as it relies on cb_state for the renderpass info.
134 std::vector<const IMAGE_VIEW_STATE *> views;
135
locke-lunargaecf2152020-05-12 17:15:41 -0600136 const auto *rp_state = cb_state.activeRenderPass.get();
John Zulauf2bc1fde2020-04-24 15:09:51 -0600137 if (!rp_state) return views;
138 const auto &rp_begin = *cb_state.activeRenderPassBeginInfo.ptr();
139 const auto *fb_state = Get<FRAMEBUFFER_STATE>(rp_begin.framebuffer);
140 if (!fb_state) return views;
141
142 return GetAttachmentViews(rp_begin, *fb_state);
143}
144
locke-lunarg3e127c72020-06-09 17:45:28 -0600145PIPELINE_STATE *GetCurrentPipelineFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint) {
146 const auto last_bound_it = cmd.lastBound.find(pipelineBindPoint);
147 if (last_bound_it == cmd.lastBound.cend()) {
148 return nullptr;
149 }
150 return last_bound_it->second.pipeline_state;
151}
152
153void GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint,
154 const PIPELINE_STATE **rtn_pipe,
155 const std::vector<LAST_BOUND_STATE::PER_SET> **rtn_sets) {
156 const auto last_bound_it = cmd.lastBound.find(pipelineBindPoint);
157 if (last_bound_it == cmd.lastBound.cend()) {
158 return;
159 }
160 *rtn_pipe = last_bound_it->second.pipeline_state;
161 *rtn_sets = &(last_bound_it->second.per_set);
162}
163
locke-lunargd556cc32019-09-17 01:21:23 -0600164#ifdef VK_USE_PLATFORM_ANDROID_KHR
165// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
166// This could also move into a seperate core_validation_android.cpp file... ?
167
168void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
169 const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
170 if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
Spencer Fricke224c9852020-04-06 07:47:29 -0700171 is_node->external_ahb = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600172 }
173 const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
174 if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
175 is_node->has_ahb_format = true;
176 is_node->ahb_format = ext_fmt_android->externalFormat;
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700177 // VUID 01894 will catch if not found in map
178 auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
179 if (it != ahb_ext_formats_map.end()) {
180 is_node->format_features = it->second;
181 }
locke-lunargd556cc32019-09-17 01:21:23 -0600182 }
183}
184
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700185void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
186 const VkExternalMemoryBufferCreateInfo *embci = lvl_find_in_chain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
187 if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
188 bs_node->external_ahb = true;
189 }
190}
191
locke-lunargd556cc32019-09-17 01:21:23 -0600192void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700193 VkSamplerYcbcrConversion ycbcr_conversion,
194 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
locke-lunargd556cc32019-09-17 01:21:23 -0600195 const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
196 if (ext_format_android && (0 != ext_format_android->externalFormat)) {
197 ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700198 // VUID 01894 will catch if not found in map
199 auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
200 if (it != ahb_ext_formats_map.end()) {
201 ycbcr_state->format_features = it->second;
202 }
locke-lunargd556cc32019-09-17 01:21:23 -0600203 }
204};
205
206void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
207 ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
208};
209
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700210void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
211 VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
212 if (VK_SUCCESS != result) return;
213 auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
214 if (ahb_format_props) {
215 ahb_ext_formats_map.insert({ahb_format_props->externalFormat, ahb_format_props->formatFeatures});
216 }
217}
218
locke-lunargd556cc32019-09-17 01:21:23 -0600219#else
220
221void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
222
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700223void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}
224
locke-lunargd556cc32019-09-17 01:21:23 -0600225void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700226 VkSamplerYcbcrConversion ycbcr_conversion,
227 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state){};
locke-lunargd556cc32019-09-17 01:21:23 -0600228
229void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
230
231#endif // VK_USE_PLATFORM_ANDROID_KHR
232
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -0600233std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
234 uint32_t set) {
235 std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
236 if (layout_data && (set < layout_data->set_layouts.size())) {
237 dsl = layout_data->set_layouts[set];
238 }
239 return dsl;
240}
241
Petr Kraus44f1c482020-04-25 20:09:25 +0200242void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
243 // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
244 // if format is AHB external format then the features are already set
245 if (image_state.has_ahb_format == false) {
246 const VkImageTiling image_tiling = image_state.createInfo.tiling;
247 const VkFormat image_format = image_state.createInfo.format;
248 if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
249 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
250 VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
251 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image, &drm_format_properties);
252
253 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
254 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
255 nullptr};
256 format_properties_2.pNext = (void *)&drm_properties_list;
257 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Lionel Landwerlin09351a72020-06-22 18:15:59 +0300258 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
259 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
260 drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
261 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Petr Kraus44f1c482020-04-25 20:09:25 +0200262
263 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300264 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
265 drm_format_properties.drmFormatModifier) {
266 image_state.format_features =
Petr Kraus44f1c482020-04-25 20:09:25 +0200267 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300268 break;
Petr Kraus44f1c482020-04-25 20:09:25 +0200269 }
270 }
271 } else {
272 VkFormatProperties format_properties;
273 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
274 image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
275 : format_properties.optimalTilingFeatures;
276 }
277 }
278}
279
// Create and register the IMAGE_STATE tracking object for a successfully created image.
// Records disjoint/AHB/swapchain properties and caches memory requirements up front.
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    // Images created for a swapchain bind to swapchain memory rather than their own allocation.
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            // Disjoint (multi-planar) images need per-plane requirements via the *2 query,
            // chaining VkImagePlaneMemoryRequirementsInfo for each plane aspect.
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least first plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    // Derive the image's format feature flags for later validation.
    AddImageStateProps(*is_node, device, physical_device);

    is_node->unprotected = ((pCreateInfo->flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);

    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}
330
331void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
332 if (!image) return;
333 IMAGE_STATE *image_state = GetImageState(image);
334 const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
335 InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
336 // Clean up memory mapping, bindings and range references for image
337 for (auto mem_binding : image_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700338 RemoveImageMemoryRange(image, mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -0600339 }
340 if (image_state->bind_swapchain) {
341 auto swapchain = GetSwapchainState(image_state->bind_swapchain);
342 if (swapchain) {
locke-lunargb3584732019-10-28 20:18:36 -0600343 swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
locke-lunargd556cc32019-09-17 01:21:23 -0600344 }
345 }
346 RemoveAliasingImage(image_state);
347 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500348 image_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600349 // Remove image from imageMap
350 imageMap.erase(image);
351}
352
353void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
354 VkImageLayout imageLayout, const VkClearColorValue *pColor,
355 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
356 auto cb_node = GetCBState(commandBuffer);
357 auto image_state = GetImageState(image);
358 if (cb_node && image_state) {
359 AddCommandBufferBindingImage(cb_node, image_state);
360 }
361}
362
363void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
364 VkImageLayout imageLayout,
365 const VkClearDepthStencilValue *pDepthStencil,
366 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
367 auto cb_node = GetCBState(commandBuffer);
368 auto image_state = GetImageState(image);
369 if (cb_node && image_state) {
370 AddCommandBufferBindingImage(cb_node, image_state);
371 }
372}
373
374void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
375 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
376 uint32_t regionCount, const VkImageCopy *pRegions) {
377 auto cb_node = GetCBState(commandBuffer);
378 auto src_image_state = GetImageState(srcImage);
379 auto dst_image_state = GetImageState(dstImage);
380
381 // Update bindings between images and cmd buffer
382 AddCommandBufferBindingImage(cb_node, src_image_state);
383 AddCommandBufferBindingImage(cb_node, dst_image_state);
384}
385
Jeff Leger178b1e52020-10-05 12:22:23 -0400386void ValidationStateTracker::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
387 const VkCopyImageInfo2KHR *pCopyImageInfo) {
388 auto cb_node = GetCBState(commandBuffer);
389 auto src_image_state = GetImageState(pCopyImageInfo->srcImage);
390 auto dst_image_state = GetImageState(pCopyImageInfo->dstImage);
391
392 // Update bindings between images and cmd buffer
393 AddCommandBufferBindingImage(cb_node, src_image_state);
394 AddCommandBufferBindingImage(cb_node, dst_image_state);
395}
396
locke-lunargd556cc32019-09-17 01:21:23 -0600397void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
398 VkImageLayout srcImageLayout, VkImage dstImage,
399 VkImageLayout dstImageLayout, uint32_t regionCount,
400 const VkImageResolve *pRegions) {
401 auto cb_node = GetCBState(commandBuffer);
402 auto src_image_state = GetImageState(srcImage);
403 auto dst_image_state = GetImageState(dstImage);
404
405 // Update bindings between images and cmd buffer
406 AddCommandBufferBindingImage(cb_node, src_image_state);
407 AddCommandBufferBindingImage(cb_node, dst_image_state);
408}
409
Jeff Leger178b1e52020-10-05 12:22:23 -0400410void ValidationStateTracker::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
411 const VkResolveImageInfo2KHR *pResolveImageInfo) {
412 auto cb_node = GetCBState(commandBuffer);
413 auto src_image_state = GetImageState(pResolveImageInfo->srcImage);
414 auto dst_image_state = GetImageState(pResolveImageInfo->dstImage);
415
416 // Update bindings between images and cmd buffer
417 AddCommandBufferBindingImage(cb_node, src_image_state);
418 AddCommandBufferBindingImage(cb_node, dst_image_state);
419}
420
locke-lunargd556cc32019-09-17 01:21:23 -0600421void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
422 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
423 uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
424 auto cb_node = GetCBState(commandBuffer);
425 auto src_image_state = GetImageState(srcImage);
426 auto dst_image_state = GetImageState(dstImage);
427
428 // Update bindings between images and cmd buffer
429 AddCommandBufferBindingImage(cb_node, src_image_state);
430 AddCommandBufferBindingImage(cb_node, dst_image_state);
431}
432
Jeff Leger178b1e52020-10-05 12:22:23 -0400433void ValidationStateTracker::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
434 const VkBlitImageInfo2KHR *pBlitImageInfo) {
435 auto cb_node = GetCBState(commandBuffer);
436 auto src_image_state = GetImageState(pBlitImageInfo->srcImage);
437 auto dst_image_state = GetImageState(pBlitImageInfo->dstImage);
438
439 // Update bindings between images and cmd buffer
440 AddCommandBufferBindingImage(cb_node, src_image_state);
441 AddCommandBufferBindingImage(cb_node, dst_image_state);
442}
443
locke-lunargd556cc32019-09-17 01:21:23 -0600444void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
445 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
446 VkResult result) {
447 if (result != VK_SUCCESS) return;
448 // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500449 auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -0600450
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700451 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
452 RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
453 }
locke-lunargd556cc32019-09-17 01:21:23 -0600454 // Get a set of requirements in the case the app does not
sfricke-samsungad90e722020-07-08 20:54:24 -0700455 DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);
locke-lunargd556cc32019-09-17 01:21:23 -0600456
sfricke-samsungedce77a2020-07-03 22:35:13 -0700457 buffer_state->unprotected = ((pCreateInfo->flags & VK_BUFFER_CREATE_PROTECTED_BIT) == 0);
458
locke-lunargd556cc32019-09-17 01:21:23 -0600459 bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
460}
461
462void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
463 const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
464 VkResult result) {
465 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500466 auto buffer_state = GetBufferShared(pCreateInfo->buffer);
locke-lunarg25b6c352020-08-06 17:44:18 -0600467 auto buffer_view_state = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
468
469 VkFormatProperties format_properties;
470 DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);
471 buffer_view_state->format_features = format_properties.bufferFeatures;
472
473 bufferViewMap.insert(std::make_pair(*pView, std::move(buffer_view_state)));
locke-lunargd556cc32019-09-17 01:21:23 -0600474}
475
476void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
477 const VkAllocationCallbacks *pAllocator, VkImageView *pView,
478 VkResult result) {
479 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500480 auto image_state = GetImageShared(pCreateInfo->image);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700481 auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
482
483 // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
484 const VkImageTiling image_tiling = image_state->createInfo.tiling;
485 const VkFormat image_view_format = pCreateInfo->format;
486 if (image_state->has_ahb_format == true) {
487 // The ImageView uses same Image's format feature since they share same AHB
488 image_view_state->format_features = image_state->format_features;
489 } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
490 // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
491 assert(device_extensions.vk_ext_image_drm_format_modifier);
492 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
493 nullptr};
494 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);
495
496 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
497 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
498 nullptr};
499 format_properties_2.pNext = (void *)&drm_properties_list;
500 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
501
502 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300503 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700504 image_view_state->format_features |=
505 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300506 break;
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700507 }
508 }
509 } else {
510 VkFormatProperties format_properties;
511 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
512 image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
513 : format_properties.optimalTilingFeatures;
514 }
515
516 imageViewMap.insert(std::make_pair(*pView, std::move(image_view_state)));
locke-lunargd556cc32019-09-17 01:21:23 -0600517}
518
519void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
520 uint32_t regionCount, const VkBufferCopy *pRegions) {
521 auto cb_node = GetCBState(commandBuffer);
522 auto src_buffer_state = GetBufferState(srcBuffer);
523 auto dst_buffer_state = GetBufferState(dstBuffer);
524
525 // Update bindings between buffers and cmd buffer
526 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
527 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
528}
529
Jeff Leger178b1e52020-10-05 12:22:23 -0400530void ValidationStateTracker::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
531 const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
532 auto cb_node = GetCBState(commandBuffer);
533 auto src_buffer_state = GetBufferState(pCopyBufferInfos->srcBuffer);
534 auto dst_buffer_state = GetBufferState(pCopyBufferInfos->dstBuffer);
535
536 // Update bindings between buffers and cmd buffer
537 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
538 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
539}
540
locke-lunargd556cc32019-09-17 01:21:23 -0600541void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
542 const VkAllocationCallbacks *pAllocator) {
543 IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
544 if (!image_view_state) return;
545 const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
546
547 // Any bound cmd buffers are now invalid
548 InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500549 image_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600550 imageViewMap.erase(imageView);
551}
552
553void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
554 if (!buffer) return;
555 auto buffer_state = GetBufferState(buffer);
556 const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
557
558 InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
559 for (auto mem_binding : buffer_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700560 RemoveBufferMemoryRange(buffer, mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -0600561 }
562 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500563 buffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600564 bufferMap.erase(buffer_state->buffer);
565}
566
567void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
568 const VkAllocationCallbacks *pAllocator) {
569 if (!bufferView) return;
570 auto buffer_view_state = GetBufferViewState(bufferView);
571 const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
572
573 // Any bound cmd buffers are now invalid
574 InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500575 buffer_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600576 bufferViewMap.erase(bufferView);
577}
578
579void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
580 VkDeviceSize size, uint32_t data) {
581 auto cb_node = GetCBState(commandBuffer);
582 auto buffer_state = GetBufferState(dstBuffer);
583 // Update bindings between buffer and cmd buffer
584 AddCommandBufferBindingBuffer(cb_node, buffer_state);
585}
586
587void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
588 VkImageLayout srcImageLayout, VkBuffer dstBuffer,
589 uint32_t regionCount, const VkBufferImageCopy *pRegions) {
590 auto cb_node = GetCBState(commandBuffer);
591 auto src_image_state = GetImageState(srcImage);
592 auto dst_buffer_state = GetBufferState(dstBuffer);
593
594 // Update bindings between buffer/image and cmd buffer
595 AddCommandBufferBindingImage(cb_node, src_image_state);
596 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
597}
598
Jeff Leger178b1e52020-10-05 12:22:23 -0400599void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
600 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
601 auto cb_node = GetCBState(commandBuffer);
602 auto src_image_state = GetImageState(pCopyImageToBufferInfo->srcImage);
603 auto dst_buffer_state = GetBufferState(pCopyImageToBufferInfo->dstBuffer);
604
605 // Update bindings between buffer/image and cmd buffer
606 AddCommandBufferBindingImage(cb_node, src_image_state);
607 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
608}
609
locke-lunargd556cc32019-09-17 01:21:23 -0600610void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
611 VkImageLayout dstImageLayout, uint32_t regionCount,
612 const VkBufferImageCopy *pRegions) {
613 auto cb_node = GetCBState(commandBuffer);
614 auto src_buffer_state = GetBufferState(srcBuffer);
615 auto dst_image_state = GetImageState(dstImage);
616
617 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
618 AddCommandBufferBindingImage(cb_node, dst_image_state);
619}
620
Jeff Leger178b1e52020-10-05 12:22:23 -0400621void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
622 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
623 auto cb_node = GetCBState(commandBuffer);
624 auto src_buffer_state = GetBufferState(pCopyBufferToImageInfo->srcBuffer);
625 auto dst_image_state = GetImageState(pCopyBufferToImageInfo->dstImage);
626
627 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
628 AddCommandBufferBindingImage(cb_node, dst_image_state);
629}
630
locke-lunargd556cc32019-09-17 01:21:23 -0600631// Get the image viewstate for a given framebuffer attachment
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300632IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(CMD_BUFFER_STATE *cb, FRAMEBUFFER_STATE *framebuffer,
633 uint32_t index) {
634 if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
635 assert(index < cb->imagelessFramebufferAttachments.size());
636 return cb->imagelessFramebufferAttachments[index];
637 }
locke-lunargd556cc32019-09-17 01:21:23 -0600638 assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
639 const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
640 return GetImageViewState(image_view);
641}
642
643// Get the image viewstate for a given framebuffer attachment
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300644const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const CMD_BUFFER_STATE *cb,
645 const FRAMEBUFFER_STATE *framebuffer,
locke-lunargd556cc32019-09-17 01:21:23 -0600646 uint32_t index) const {
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300647 if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
648 assert(index < cb->imagelessFramebufferAttachments.size());
649 return cb->imagelessFramebufferAttachments[index];
650 }
locke-lunargd556cc32019-09-17 01:21:23 -0600651 assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
652 const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
653 return GetImageViewState(image_view);
654}
655
// Record mutual aliasing links between image_state and every other compatible image
// bound to the same backing storage (device memory or swapchain image slot).
void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        // Swapchain-backed image: the set of co-bound images lives on the swapchain image entry
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        // Regular image: the set of co-bound images lives on the bound device memory object
        if (image_state->binding.mem_state) {
            bound_images = &image_state->binding.mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    // Link both directions; only add the back-link when this forward link is new
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}
684
685void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
686 for (const auto &image : image_state->aliasing_images) {
687 auto is = GetImageState(image);
688 if (is) {
689 is->aliasing_images.erase(image_state->image);
690 }
691 }
692 image_state->aliasing_images.clear();
693}
694
695void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
696 // This is one way clear. Because the bound_images include cross references, the one way clear loop could clear the whole
697 // reference. It doesn't need two ways clear.
698 for (const auto &handle : bound_images) {
699 auto is = GetImageState(handle);
700 if (is) {
701 is->aliasing_images.clear();
702 }
703 }
704}
705
Jeff Bolz310775c2019-10-09 00:46:33 -0500706const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
707 auto it = eventMap.find(event);
708 if (it == eventMap.end()) {
709 return nullptr;
710 }
711 return &it->second;
712}
713
locke-lunargd556cc32019-09-17 01:21:23 -0600714EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
715 auto it = eventMap.find(event);
716 if (it == eventMap.end()) {
717 return nullptr;
718 }
719 return &it->second;
720}
721
722const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
723 auto it = queueMap.find(queue);
724 if (it == queueMap.cend()) {
725 return nullptr;
726 }
727 return &it->second;
728}
729
730QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
731 auto it = queueMap.find(queue);
732 if (it == queueMap.end()) {
733 return nullptr;
734 }
735 return &it->second;
736}
737
738const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
739 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
740 auto it = phys_dev_map->find(phys);
741 if (it == phys_dev_map->end()) {
742 return nullptr;
743 }
744 return &it->second;
745}
746
747PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
748 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
749 auto it = phys_dev_map->find(phys);
750 if (it == phys_dev_map->end()) {
751 return nullptr;
752 }
753 return &it->second;
754}
755
// Accessor for this tracker's cached physical-device state member
PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
// Const accessor for this tracker's cached physical-device state member
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
758
// Return ptr to memory binding for given handle of specified type
// Shared implementation for the const and non-const GetObjectMemBinding overloads below;
// State/Result are instantiated as (const) tracker pointer and (const) BINDABLE pointer.
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    // Object types that cannot be bound to memory have no binding to return
    return nullptr;
}
774
// Const lookup of the bindable (image/buffer/acceleration-structure) state for a typed handle
const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}
778
// Mutable lookup of the bindable (image/buffer/acceleration-structure) state for a typed handle
BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}
782
// Create and register DEVICE_MEMORY_STATE for a newly allocated VkDeviceMemory,
// decoding the pAllocateInfo pNext chain for dedicated/export/import/flags info.
void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    // Fake base address used for synthetic address-range tracking of this allocation
    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo, fake_address);
    auto mem_info = memObjMap[mem].get();

    // Dedicated allocation: remember which buffer/image this memory is dedicated to
    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }

    // multi_instance: deviceMask with more than one bit set (dev_mask & (dev_mask - 1) tests for >1 bit)
    auto alloc_flags = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
    if (alloc_flags) {
        auto dev_mask = alloc_flags->deviceMask;
        if ((dev_mask != 0) && (dev_mask & (dev_mask - 1))) {
            mem_info->multi_instance = true;
        }
    }
    // Also multi-instance if allocated from a multi-instance heap on a multi-GPU device group
    auto heap_index = phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].heapIndex;
    mem_info->multi_instance |= (((phys_dev_mem_props.memoryHeaps[heap_index].flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT) != 0) &&
                                 physical_device_count > 1);

    // Assumes validation already for only a single import operation in the pNext
#ifdef VK_USE_PLATFORM_WIN32_KHR
    auto win32_import = lvl_find_in_chain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
    if (win32_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = win32_import->handleType;
    }
#endif
    auto fd_import = lvl_find_in_chain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
    if (fd_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = fd_import->handleType;
    }
    auto host_pointer_import = lvl_find_in_chain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
    if (host_pointer_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = host_pointer_import->handleType;
    }
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    // AHB Import doesn't have handle in the pNext struct
    // It should be assumed that all imported AHB can only have the same, single handleType
    auto ahb_import = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
    if ((ahb_import) && (ahb_import->buffer != nullptr)) {
        mem_info->is_import_ahb = true;
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
#endif

    // Protected memory cannot be mapped/accessed by the host
    const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    mem_info->unprotected = ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) == 0);
}
845
846// Create binding link between given sampler and command buffer node
847void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600848 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600849 return;
850 }
Jeff Bolzadbfa852019-10-04 13:53:30 -0500851 AddCommandBufferBinding(sampler_state->cb_bindings,
852 VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600853}
854
855// Create binding link between given image node and command buffer node
856void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600857 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600858 return;
859 }
860 // Skip validation if this image was created through WSI
861 if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
862 // First update cb binding for image
Jeff Bolzadbfa852019-10-04 13:53:30 -0500863 if (AddCommandBufferBinding(image_state->cb_bindings,
864 VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600865 // Now update CB binding in MemObj mini CB list
866 for (auto mem_binding : image_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700867 // Now update CBInfo's Mem reference list
868 AddCommandBufferBinding(mem_binding->cb_bindings,
869 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600870 }
871 }
872 }
873}
874
875// Create binding link between given image view node and its image with command buffer node
876void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600877 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600878 return;
879 }
880 // First add bindings for imageView
Jeff Bolzadbfa852019-10-04 13:53:30 -0500881 if (AddCommandBufferBinding(view_state->cb_bindings,
882 VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600883 // Only need to continue if this is a new item
Jeff Bolzadbfa852019-10-04 13:53:30 -0500884 auto image_state = view_state->image_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -0600885 // Add bindings for image within imageView
886 if (image_state) {
887 AddCommandBufferBindingImage(cb_node, image_state);
888 }
889 }
890}
891
892// Create binding link between given buffer node and command buffer node
893void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600894 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600895 return;
896 }
897 // First update cb binding for buffer
Jeff Bolzadbfa852019-10-04 13:53:30 -0500898 if (AddCommandBufferBinding(buffer_state->cb_bindings,
899 VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600900 // Now update CB binding in MemObj mini CB list
901 for (auto mem_binding : buffer_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700902 // Now update CBInfo's Mem reference list
903 AddCommandBufferBinding(mem_binding->cb_bindings,
904 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600905 }
906 }
907}
908
909// Create binding link between given buffer view node and its buffer with command buffer node
910void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600911 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600912 return;
913 }
914 // First add bindings for bufferView
Jeff Bolzadbfa852019-10-04 13:53:30 -0500915 if (AddCommandBufferBinding(view_state->cb_bindings,
916 VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
917 auto buffer_state = view_state->buffer_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -0600918 // Add bindings for buffer within bufferView
919 if (buffer_state) {
920 AddCommandBufferBindingBuffer(cb_node, buffer_state);
921 }
922 }
923}
924
925// Create binding link between given acceleration structure and command buffer node
926void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
927 ACCELERATION_STRUCTURE_STATE *as_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600928 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600929 return;
930 }
Jeff Bolzadbfa852019-10-04 13:53:30 -0500931 if (AddCommandBufferBinding(
932 as_state->cb_bindings,
933 VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600934 // Now update CB binding in MemObj mini CB list
935 for (auto mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700936 // Now update CBInfo's Mem reference list
937 AddCommandBufferBinding(mem_binding->cb_bindings,
938 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600939 }
940 }
941}
942
locke-lunargd556cc32019-09-17 01:21:23 -0600943// Clear a single object binding from given memory object
locke-lunarg5f59e782019-12-19 10:32:23 -0700944void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
locke-lunargd556cc32019-09-17 01:21:23 -0600945 // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
946 if (mem_info) {
947 mem_info->obj_bindings.erase(typed_handle);
948 }
949}
950
951// ClearMemoryObjectBindings clears the binding of objects to memory
952// For the given object it pulls the memory bindings and makes sure that the bindings
953// no longer refer to the object being cleared. This occurs when objects are destroyed.
954void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
955 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
956 if (mem_binding) {
957 if (!mem_binding->sparse) {
locke-lunarg5f59e782019-12-19 10:32:23 -0700958 ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -0600959 } else { // Sparse, clear all bindings
960 for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
locke-lunarg5f59e782019-12-19 10:32:23 -0700961 ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -0600962 }
963 }
964 }
965}
966
// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    // A null memory handle is a no-op here (unbinding is handled elsewhere)
    if (mem != VK_NULL_HANDLE) {
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        // Null when the memory handle is unknown (object tracker reports the error)
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            // Binding covers the object's full memory requirements starting at the offset
            mem_binding->binding.size = mem_binding->requirements.size;
            // Back-reference: the memory object records which objects are bound to it
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}
994
995// For NULL mem case, clear any previous binding Else...
996// Make sure given object is in its object map
997// IF a previous binding existed, update binding
998// Add reference from objectInfo to memoryInfo
999// Add reference off of object's binding info
1000// Return VK_TRUE if addition is successful, VK_FALSE otherwise
locke-lunargcf04d582019-11-26 00:31:50 -07001001bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
1002 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
locke-lunargd556cc32019-09-17 01:21:23 -06001003 bool skip = VK_FALSE;
1004 // Handle NULL case separately, just clear previous binding & decrement reference
locke-lunargcf04d582019-11-26 00:31:50 -07001005 if (mem == VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06001006 // TODO : This should cause the range of the resource to be unbound according to spec
1007 } else {
1008 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
1009 assert(mem_binding);
1010 if (mem_binding) { // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
1011 assert(mem_binding->sparse);
locke-lunargcf04d582019-11-26 00:31:50 -07001012 MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
1013 if (binding.mem_state) {
1014 binding.mem_state->obj_bindings.insert(typed_handle);
locke-lunargd556cc32019-09-17 01:21:23 -06001015 // Need to set mem binding for this object
1016 mem_binding->sparse_bindings.insert(binding);
1017 mem_binding->UpdateBoundMemorySet();
1018 }
1019 }
1020 }
1021 return skip;
1022}
1023
// Update the command buffer's bound-descriptor bookkeeping for a draw/dispatch/trace call,
// binding each active descriptor set's resources to the command buffer. Caches per-set
// validation state so unchanged "many descriptor" (bindless-style) sets are skipped.
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
                                             const char *function) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profilinging, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[setIndex].validated_set != descriptor_set ||
                state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                              state.per_set[setIndex].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                        state.per_set[setIndex].validated_set_binding_req_map.end(),
                                        std::inserter(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, delta_reqs, function);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, binding_req_map, function);
                }

                // Record what was just validated so the next call can skip unchanged sets
                state.per_set[setIndex].validated_set = descriptor_set;
                state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    // Note whether this pipeline consumes vertex buffers at all
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}
1094
1095// Remove set from setMap and delete the set
1096void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001097 descriptor_set->destroyed = true;
Jeff Bolzadbfa852019-10-04 13:53:30 -05001098 const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001099 // Any bound cmd buffers are now invalid
Jeff Bolzadbfa852019-10-04 13:53:30 -05001100 InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001101
locke-lunargd556cc32019-09-17 01:21:23 -06001102 setMap.erase(descriptor_set->GetSet());
1103}
1104
1105// Free all DS Pools including their Sets & related sub-structs
1106// NOTE : Calls to this function should be wrapped in mutex
1107void ValidationStateTracker::DeleteDescriptorSetPools() {
1108 for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
1109 // Remove this pools' sets from setMap and delete them
1110 for (auto ds : ii->second->sets) {
1111 FreeDescriptorSet(ds);
1112 }
1113 ii->second->sets.clear();
1114 ii = descriptorPoolMap.erase(ii);
1115 }
1116}
1117
// For given object struct return a ptr of BASE_NODE type for its wrapping struct
// Uses the cached node pointer when the handle carries one; otherwise dispatches
// on the handle type to the matching Get*State map lookup.
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}
1202
sfricke-samsungbf1a2ed2020-06-14 23:31:00 -07001203// Gets union of all features defined by Potential Format Features
1204// except, does not handle the external format case for AHB as that only can be used for sampled images
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001205VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
1206 VkFormatFeatureFlags format_features = 0;
1207
1208 if (format != VK_FORMAT_UNDEFINED) {
1209 VkFormatProperties format_properties;
1210 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
1211 format_features |= format_properties.linearTilingFeatures;
1212 format_features |= format_properties.optimalTilingFeatures;
1213 if (device_extensions.vk_ext_image_drm_format_modifier) {
1214 // VK_KHR_get_physical_device_properties2 is required in this case
1215 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
1216 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
1217 nullptr};
1218 format_properties_2.pNext = (void *)&drm_properties_list;
1219 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
1220 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
1221 format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
1222 }
1223 }
1224 }
1225
1226 return format_features;
1227}
1228
locke-lunargd556cc32019-09-17 01:21:23 -06001229// Tie the VulkanTypedHandle to the cmd buffer which includes:
1230// Add object_binding to cmd buffer
1231// Add cb_binding to object
Jeff Bolzadbfa852019-10-04 13:53:30 -05001232bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
locke-lunargd556cc32019-09-17 01:21:23 -06001233 const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001234 if (disabled[command_buffer_state]) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001235 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001236 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001237 // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
1238 // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
1239 auto inserted = cb_bindings.insert({cb_node, -1});
1240 if (inserted.second) {
1241 cb_node->object_bindings.push_back(obj);
1242 inserted.first->second = (int)cb_node->object_bindings.size() - 1;
1243 return true;
1244 }
1245 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001246}
1247
1248// For a given object, if cb_node is in that objects cb_bindings, remove cb_node
1249void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
1250 BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
1251 if (base_obj) base_obj->cb_bindings.erase(cb_node);
1252}
1253
// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
// Invoked on vkResetCommandBuffer / vkBeginCommandBuffer paths (and pool reset);
// also fires the optional command_buffer_reset_callback even when no state
// object exists for cb. NOTE(review): statement order matters below — linked
// command buffers are invalidated before the link sets are cleared.
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        // Clear per-recording command flags and counters
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        // Dynamic-state tracking: status bitmasks and per-viewport/scissor masks
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->viewportWithCountMask = 0;
        pCB->scissorMask = 0;
        pCB->scissorWithCountMask = 0;
        pCB->primitiveTopology = VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;

        // Drop all pipeline/descriptor binding state per bind point
        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        // Render pass / subpass state back to "not inside a render pass"
        pCB->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        // Event and query bookkeeping recorded into this command buffer
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        // Deferred-validation lambdas queued for submit/execute time
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            framebuffer->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        // Queue-family-ownership-transfer barrier tracking
        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        pCB->small_indexed_draw_call_count = 0;

        pCB->transform_feedback_active = false;
    }
    // Callback fires regardless of whether state was found for this handle
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}
1343
1344void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
1345 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
1346 VkResult result) {
1347 if (VK_SUCCESS != result) return;
1348
1349 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
1350 if (nullptr == enabled_features_found) {
1351 const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
1352 if (features2) {
1353 enabled_features_found = &(features2->features);
1354 }
1355 }
1356
1357 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
1358 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
1359 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
1360
1361 if (nullptr == enabled_features_found) {
1362 state_tracker->enabled_features.core = {};
1363 } else {
1364 state_tracker->enabled_features.core = *enabled_features_found;
1365 }
1366
1367 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
1368 // previously set them through an explicit API call.
1369 uint32_t count;
1370 auto pd_state = GetPhysicalDeviceState(gpu);
1371 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
1372 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
1373 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
1374 // Save local link to this device's physical device state
1375 state_tracker->physical_device_state = pd_state;
1376
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001377 const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
1378 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001379 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001380 } else {
sfricke-samsung27c70722020-05-02 08:42:39 -07001381 // Set Extension Feature Aliases to false as there is no struct to check
1382 state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
1383 state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
1384 state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
1385 state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
1386 state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
1387 state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
1388
1389 // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001390
1391 const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
1392 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001393 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1394 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1395 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1396 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001397 }
1398
1399 const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
1400 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001401 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1402 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001403 }
1404
1405 const auto *descriptor_indexing_features =
1406 lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
1407 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001408 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1409 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1410 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1411 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1412 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1413 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1414 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1415 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1416 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1417 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1418 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1419 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1420 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1421 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1422 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1423 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1424 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1425 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1426 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1427 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1428 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1429 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1430 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1431 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1432 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1433 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1434 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1435 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
1436 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
1437 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1438 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1439 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1440 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1441 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1442 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1443 descriptor_indexing_features->descriptorBindingPartiallyBound;
1444 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1445 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1446 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001447 }
1448
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001449 const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001450 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001451 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001452 }
1453
1454 const auto *imageless_framebuffer_features =
1455 lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
1456 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001457 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001458 }
1459
1460 const auto *uniform_buffer_standard_layout_features =
1461 lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
1462 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001463 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1464 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001465 }
1466
1467 const auto *subgroup_extended_types_features =
1468 lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
1469 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001470 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1471 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001472 }
1473
1474 const auto *separate_depth_stencil_layouts_features =
1475 lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
1476 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001477 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1478 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001479 }
1480
1481 const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
1482 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001483 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001484 }
1485
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001486 const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001487 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001488 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001489 }
1490
1491 const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
1492 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001493 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1494 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1495 buffer_device_address->bufferDeviceAddressCaptureReplay;
1496 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1497 buffer_device_address->bufferDeviceAddressMultiDevice;
1498 }
1499 }
1500
1501 const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
1502 if (vulkan_11_features) {
1503 state_tracker->enabled_features.core11 = *vulkan_11_features;
1504 } else {
1505 // These structs are only allowed in pNext chain if there is no kPhysicalDeviceVulkan11Features
1506
1507 const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
1508 if (sixteen_bit_storage_features) {
1509 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1510 sixteen_bit_storage_features->storageBuffer16BitAccess;
1511 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1512 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1513 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1514 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1515 }
1516
1517 const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
1518 if (multiview_features) {
1519 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1520 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1521 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1522 }
1523
1524 const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
1525 if (variable_pointers_features) {
1526 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1527 variable_pointers_features->variablePointersStorageBuffer;
1528 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1529 }
1530
1531 const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
1532 if (protected_memory_features) {
1533 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1534 }
1535
1536 const auto *ycbcr_conversion_features =
1537 lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
1538 if (ycbcr_conversion_features) {
1539 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1540 }
1541
1542 const auto *shader_draw_parameters_features =
1543 lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
1544 if (shader_draw_parameters_features) {
1545 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001546 }
1547 }
1548
locke-lunargd556cc32019-09-17 01:21:23 -06001549 const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001550 if (device_group_ci) {
1551 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1552 state_tracker->device_group_create_info = *device_group_ci;
1553 } else {
1554 state_tracker->physical_device_count = 1;
1555 }
locke-lunargd556cc32019-09-17 01:21:23 -06001556
locke-lunargd556cc32019-09-17 01:21:23 -06001557 const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
1558 if (exclusive_scissor_features) {
1559 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1560 }
1561
1562 const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
1563 if (shading_rate_image_features) {
1564 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1565 }
1566
1567 const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
1568 if (mesh_shader_features) {
1569 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1570 }
1571
1572 const auto *inline_uniform_block_features =
1573 lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
1574 if (inline_uniform_block_features) {
1575 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1576 }
1577
1578 const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
1579 if (transform_feedback_features) {
1580 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1581 }
1582
locke-lunargd556cc32019-09-17 01:21:23 -06001583 const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
1584 if (vtx_attrib_div_features) {
1585 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1586 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001587
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001588 const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
1589 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001590 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001591 }
1592
1593 const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
1594 if (cooperative_matrix_features) {
1595 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1596 }
1597
locke-lunargd556cc32019-09-17 01:21:23 -06001598 const auto *compute_shader_derivatives_features =
1599 lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
1600 if (compute_shader_derivatives_features) {
1601 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1602 }
1603
1604 const auto *fragment_shader_barycentric_features =
1605 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
1606 if (fragment_shader_barycentric_features) {
1607 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1608 }
1609
1610 const auto *shader_image_footprint_features =
1611 lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
1612 if (shader_image_footprint_features) {
1613 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1614 }
1615
1616 const auto *fragment_shader_interlock_features =
1617 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
1618 if (fragment_shader_interlock_features) {
1619 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1620 }
1621
1622 const auto *demote_to_helper_invocation_features =
1623 lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
1624 if (demote_to_helper_invocation_features) {
1625 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1626 }
1627
1628 const auto *texel_buffer_alignment_features =
1629 lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
1630 if (texel_buffer_alignment_features) {
1631 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1632 }
1633
locke-lunargd556cc32019-09-17 01:21:23 -06001634 const auto *pipeline_exe_props_features =
1635 lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
1636 if (pipeline_exe_props_features) {
1637 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1638 }
1639
Jeff Bolz82f854d2019-09-17 14:56:47 -05001640 const auto *dedicated_allocation_image_aliasing_features =
1641 lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
1642 if (dedicated_allocation_image_aliasing_features) {
1643 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1644 *dedicated_allocation_image_aliasing_features;
1645 }
1646
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001647 const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
1648 if (performance_query_features) {
1649 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1650 }
1651
Tobias Hector782bcde2019-11-28 16:19:42 +00001652 const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
1653 if (device_coherent_memory_features) {
1654 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1655 }
1656
sfricke-samsungcead0802020-01-30 22:20:10 -08001657 const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
1658 if (ycbcr_image_array_features) {
1659 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1660 }
1661
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001662 const auto *ray_tracing_features = lvl_find_in_chain<VkPhysicalDeviceRayTracingFeaturesKHR>(pCreateInfo->pNext);
1663 if (ray_tracing_features) {
1664 state_tracker->enabled_features.ray_tracing_features = *ray_tracing_features;
1665 }
1666
Jeff Bolz165818a2020-05-08 11:19:03 -05001667 const auto *robustness2_features = lvl_find_in_chain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
1668 if (robustness2_features) {
1669 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1670 }
1671
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001672 const auto *fragment_density_map_features =
1673 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
1674 if (fragment_density_map_features) {
1675 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1676 }
1677
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001678 const auto *fragment_density_map_features2 =
1679 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
1680 if (fragment_density_map_features2) {
1681 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1682 }
1683
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001684 const auto *astc_decode_features = lvl_find_in_chain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
1685 if (astc_decode_features) {
1686 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1687 }
1688
Tony-LunarG7337b312020-04-15 16:40:25 -06001689 const auto *custom_border_color_features = lvl_find_in_chain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
1690 if (custom_border_color_features) {
1691 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1692 }
1693
sfricke-samsungfd661d62020-05-16 00:57:27 -07001694 const auto *pipeline_creation_cache_control_features =
1695 lvl_find_in_chain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
1696 if (pipeline_creation_cache_control_features) {
1697 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1698 }
1699
Piers Daniell39842ee2020-07-10 16:42:33 -06001700 const auto *extended_dynamic_state_features =
1701 lvl_find_in_chain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1702 if (extended_dynamic_state_features) {
1703 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1704 }
1705
locke-lunargd556cc32019-09-17 01:21:23 -06001706 // Store physical device properties and physical device mem limits into CoreChecks structs
1707 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1708 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001709 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1710 &state_tracker->phys_dev_props_core11);
1711 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1712 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001713
1714 const auto &dev_ext = state_tracker->device_extensions;
1715 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1716
1717 if (dev_ext.vk_khr_push_descriptor) {
1718 // Get the needed push_descriptor limits
1719 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1720 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1721 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1722 }
1723
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001724 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
1725 VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
1726 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1727 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1728 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1729 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1730 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1731 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1732 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1733 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1734 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1735 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1736 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1737 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1738 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1739 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1740 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1741 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1742 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1743 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1744 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1745 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1746 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1747 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1748 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1749 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1750 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1751 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1752 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1753 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1754 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1755 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1756 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1757 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1758 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1759 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1760 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1761 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1762 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1763 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1764 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1765 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
1766 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
1767 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
1768 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
1769 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
1770 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
1771 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
1772 }
1773
locke-lunargd556cc32019-09-17 01:21:23 -06001774 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1775 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1776 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1777 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001778
1779 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
1780 VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
1781 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
1782 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
1783 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
1784 depth_stencil_resolve_props.supportedStencilResolveModes;
1785 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
1786 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
1787 }
1788
locke-lunargd556cc32019-09-17 01:21:23 -06001789 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001790 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
1791 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing, &phys_dev_props->ray_tracing_propsKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06001792 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1793 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001794 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001795 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07001796 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06001797 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001798
1799 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
1800 VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
1801 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
1802 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
1803 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
1804 }
1805
1806 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
1807 VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
1808 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
1809 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
1810 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
1811 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
1812 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
1813 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
1814 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
1815 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
1816 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
1817 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
1818 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
1819 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
1820 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
1821 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
1822 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
1823 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
1824 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
1825 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
1826 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
1827 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
1828 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
1829 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001830
locke-lunargd556cc32019-09-17 01:21:23 -06001831 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1832 // Get the needed cooperative_matrix properties
1833 auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1834 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
1835 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1836 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1837
1838 uint32_t numCooperativeMatrixProperties = 0;
1839 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
1840 state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
1841 lvl_init_struct<VkCooperativeMatrixPropertiesNV>());
1842
1843 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
1844 state_tracker->cooperative_matrix_properties.data());
1845 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001846 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06001847 // Get the needed subgroup limits
1848 auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
1849 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
1850 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1851
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001852 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
1853 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
1854 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
1855 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06001856 }
1857
1858 // Store queue family data
1859 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1860 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001861 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06001862 state_tracker->queue_family_index_map.insert(
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001863 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.queueCount));
1864 state_tracker->queue_family_create_flags_map.insert(
1865 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.flags));
locke-lunargd556cc32019-09-17 01:21:23 -06001866 }
1867 }
1868}
1869
// Tear down all device-level tracked state before the device handle is destroyed.
// Ordering matters throughout: command buffers are reset first so their
// object_bindings back-references are unlinked before the bound objects go away,
// and descriptor pools are deleted before we assert that no loose sets remain.
void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}
1893
1894// Loop through bound objects and increment their in_use counts.
1895void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
1896 for (auto obj : cb_node->object_bindings) {
1897 auto base_obj = GetStateStructPtrFromObject(obj);
1898 if (base_obj) {
1899 base_obj->in_use.fetch_add(1);
1900 }
1901 }
1902}
1903
1904// Track which resources are in-flight by atomically incrementing their "in_use" count
1905void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1906 cb_node->submitCount++;
1907 cb_node->in_use.fetch_add(1);
1908
1909 // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
1910 IncrementBoundObjects(cb_node);
1911 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
1912 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
1913 // should then be flagged prior to calling this function
1914 for (auto event : cb_node->writeEventsBeforeWait) {
1915 auto event_state = GetEventState(event);
1916 if (event_state) event_state->write_in_use++;
1917 }
1918}
1919
1920// Decrement in-use count for objects bound to command buffer
1921void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
1922 BASE_NODE *base_obj = nullptr;
1923 for (auto obj : cb_node->object_bindings) {
1924 base_obj = GetStateStructPtrFromObject(obj);
1925 if (base_obj) {
1926 base_obj->in_use.fetch_sub(1);
1927 }
1928 }
1929}
1930
// Retire (complete) all submissions on pQueue up to, and not including, sequence
// number 'seq'. Releases the in_use counts taken at submit time, propagates
// timeline-semaphore payloads, replays deferred query updates, and marks fences
// retired. Finally, recursively rolls forward any OTHER queue whose signal this
// queue waited on, up to the highest sequence number observed in a wait.
void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    // Highest retired sequence number we owe each other queue (from cross-queue waits).
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        // Waits are now satisfied: drop the in_use reference and remember how far
        // the signaling queue must have progressed.
        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        for (auto &signal : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(signal.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
                // Timeline payloads are monotonic: only move the value forward.
                if (pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && pSemaphore->payload < signal.payload) {
                    pSemaphore->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            // Replay the command buffer's recorded query updates (record-time
            // lambdas) into a local map, then publish only queries that ENDED —
            // those become AVAILABLE now that the GPU work has retired.
            QueryMap localQueryToStateMap;
            VkQueryPool first_pool = VK_NULL_HANDLE;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &localQueryToStateMap);
            }

            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        // Internal-scope fences attached to this submission are now signaled.
        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
}
2005
2006// Submit a fence to a queue, delimiting previous fences and previous untracked
2007// work by it.
2008static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
2009 pFence->state = FENCE_INFLIGHT;
2010 pFence->signaler.first = pQueue->queue;
2011 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
2012}
2013
// Record the state effects of a successful vkQueueSubmit: register the fence,
// take in_use references on semaphores and command buffers, replay deferred
// query/event updates, and enqueue one CB_SUBMISSION per VkSubmitInfo. External
// semaphores force early retirement since their waits happen outside our view.
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        // Sequence number this submission will occupy once pushed.
        const uint64_t next_seq = pQueue->seq + pQueue->submissions.size() + 1;
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_WAIT wait;
                    wait.semaphore = semaphore;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        // Binary wait: consume the pending signal (if any) and reset the semaphore.
                        if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                            wait.queue = pSemaphore->signaler.first;
                            wait.seq = pSemaphore->signaler.second;
                            semaphore_waits.push_back(wait);
                            pSemaphore->in_use.fetch_add(1);
                        }
                        pSemaphore->signaler.first = VK_NULL_HANDLE;
                        pSemaphore->signaled = false;
                        // NOTE(review): the timeline branch below dereferences timeline_semaphore_submit
                        // without a null check. Valid usage requires VkTimelineSemaphoreSubmitInfo in the
                        // pNext chain for timeline semaphores, but a misbehaving app would crash here —
                        // confirm that earlier validation guarantees the struct's presence.
                    } else if (pSemaphore->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
                        // Timeline wait on a value not yet reached: track it as a wait on this queue.
                        wait.queue = queue;
                        wait.seq = next_seq;
                        wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
                        semaphore_waits.push_back(wait);
                        pSemaphore->in_use.fetch_add(1);
                    }
                } else {
                    // External semaphore: we can't see its signaler; just hold a reference.
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = next_seq;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = next_seq;
                        pSemaphore->signaled = true;
                    } else {
                        signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, next_seq);
                }
            }
        }
        // Performance-query pass index for this submit (VK_KHR_performance_query).
        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                // Secondary command buffers executed by this primary are submitted too.
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                // Replay record-time query/event update lambdas into local maps,
                // then publish the results to the device-level state maps.
                VkQueryPool first_pool = VK_NULL_HANDLE;
                EventToStageMap localEventToStageMap;
                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, first_pool, perf_pass, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        // The fence is attached only to the last submission in the batch.
        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, perf_pass);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
2144
2145void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
2146 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
2147 VkResult result) {
2148 if (VK_SUCCESS == result) {
2149 AddMemObjInfo(device, *pMemory, pAllocateInfo);
2150 }
2151 return;
2152}
2153
// Remove all tracked state for a VkDeviceMemory that is about to be freed:
// unbind every object bound to it (including sparse bindings that reference
// this allocation), invalidate command buffers that recorded against it,
// release its fake address range, and erase the state object itself.
void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            // The next iterator is saved before a potential erase so iteration stays valid.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            // Recompute the resource's cached set of bound memory objects.
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    // Return this allocation's synthetic address range to the fake-address allocator.
    fake_memory.Free(mem_info->fake_base_address);
    memObjMap.erase(mem);
}
2201
// Record the state effects of a successful vkQueueBindSparse: update sparse
// memory bindings for buffers and images, register the fence, take semaphore
// references, and enqueue one (command-buffer-less) CB_SUBMISSION per
// VkBindSparseInfo so retirement logic can track the semaphores and fence.
void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        // Semaphore handling mirrors PostCallRecordQueueSubmit, minus timeline support.
        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    // Consume the pending signal (if any) and reset the binary semaphore.
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    // External semaphore: signaler is outside our view; just hold a reference.
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    // Signal lands at the sequence number this submission will occupy.
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);

                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = pSemaphore->signaler.second;
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        // The fence is attached only to the last bind-info in the batch.
        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
2302
2303void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
2304 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
2305 VkResult result) {
2306 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002307 auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002308 semaphore_state->signaler.first = VK_NULL_HANDLE;
2309 semaphore_state->signaler.second = 0;
2310 semaphore_state->signaled = false;
2311 semaphore_state->scope = kSyncScopeInternal;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002312 semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
2313 semaphore_state->payload = 0;
2314 auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
2315 if (semaphore_type_create_info) {
2316 semaphore_state->type = semaphore_type_create_info->semaphoreType;
2317 semaphore_state->payload = semaphore_type_create_info->initialValue;
2318 }
locke-lunargd556cc32019-09-17 01:21:23 -06002319 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
2320}
2321
2322void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
2323 VkSemaphoreImportFlagsKHR flags) {
2324 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
2325 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
2326 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
2327 sema_node->scope == kSyncScopeInternal) {
2328 sema_node->scope = kSyncScopeExternalTemporary;
2329 } else {
2330 sema_node->scope = kSyncScopeExternalPermanent;
2331 }
2332 }
2333}
2334
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002335void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
2336 VkResult result) {
2337 auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
2338 pSemaphore->payload = pSignalInfo->value;
2339}
2340
locke-lunargd556cc32019-09-17 01:21:23 -06002341void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
2342 auto mem_info = GetDevMemState(mem);
2343 if (mem_info) {
2344 mem_info->mapped_range.offset = offset;
2345 mem_info->mapped_range.size = size;
2346 mem_info->p_driver_data = *ppData;
2347 }
2348}
2349
2350void ValidationStateTracker::RetireFence(VkFence fence) {
2351 auto pFence = GetFenceState(fence);
2352 if (pFence && pFence->scope == kSyncScopeInternal) {
2353 if (pFence->signaler.first != VK_NULL_HANDLE) {
2354 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002355 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002356 } else {
2357 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
2358 // the fence as retired.
2359 pFence->state = FENCE_RETIRED;
2360 }
2361 }
2362}
2363
2364void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2365 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2366 if (VK_SUCCESS != result) return;
2367
2368 // When we know that all fences are complete we can clean/remove their CBs
2369 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2370 for (uint32_t i = 0; i < fenceCount; i++) {
2371 RetireFence(pFences[i]);
2372 }
2373 }
2374 // NOTE : Alternate case not handled here is when some fences have completed. In
2375 // this case for app to guarantee which fences completed it will have to call
2376 // vkGetFenceStatus() at which point we'll clean/remove their CBs if complete.
2377}
2378
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002379void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
2380 auto pSemaphore = GetSemaphoreState(semaphore);
2381 if (pSemaphore) {
2382 for (auto &pair : queueMap) {
2383 QUEUE_STATE &queueState = pair.second;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002384 uint64_t max_seq = 0;
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002385 for (const auto &submission : queueState.submissions) {
2386 for (const auto &signalSemaphore : submission.signalSemaphores) {
2387 if (signalSemaphore.semaphore == semaphore && signalSemaphore.payload <= until_payload) {
Tony-LunarG47d5e272020-04-07 15:35:55 -06002388 if (signalSemaphore.seq > max_seq) {
2389 max_seq = signalSemaphore.seq;
2390 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002391 }
2392 }
2393 }
Tony-LunarG47d5e272020-04-07 15:35:55 -06002394 if (max_seq) {
2395 RetireWorkOnQueue(&queueState, max_seq);
2396 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002397 }
2398 }
2399}
2400
John Zulauff89de662020-04-13 18:57:34 -06002401void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2402 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002403 if (VK_SUCCESS != result) return;
2404
2405 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2406 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2407 }
2408}
2409
John Zulauff89de662020-04-13 18:57:34 -06002410void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2411 VkResult result) {
2412 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2413}
2414
2415void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
2416 uint64_t timeout, VkResult result) {
2417 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2418}
2419
Adrian Coca Lorentec7d76102020-09-28 13:58:16 +02002420void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2421 VkResult result) {
2422 if (VK_SUCCESS != result) return;
2423
2424 RetireTimelineSemaphore(semaphore, *pValue);
2425}
2426
2427void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2428 VkResult result) {
2429 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2430}
2431void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2432 VkResult result) {
2433 RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
2434}
2435
locke-lunargd556cc32019-09-17 01:21:23 -06002436void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2437 if (VK_SUCCESS != result) return;
2438 RetireFence(fence);
2439}
2440
2441void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
2442 // Add queue to tracking set only if it is new
2443 auto queue_is_new = queues.emplace(queue);
2444 if (queue_is_new.second == true) {
2445 QUEUE_STATE *queue_state = &queueMap[queue];
2446 queue_state->queue = queue;
2447 queue_state->queueFamilyIndex = queue_family_index;
2448 queue_state->seq = 0;
2449 }
2450}
2451
2452void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
2453 VkQueue *pQueue) {
2454 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
2455}
2456
2457void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
2458 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
2459}
2460
2461void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2462 if (VK_SUCCESS != result) return;
2463 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002464 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002465}
2466
2467void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2468 if (VK_SUCCESS != result) return;
2469 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002470 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002471 }
2472}
2473
2474void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2475 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002476 auto fence_state = GetFenceState(fence);
2477 fence_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002478 fenceMap.erase(fence);
2479}
2480
2481void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2482 const VkAllocationCallbacks *pAllocator) {
2483 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002484 auto semaphore_state = GetSemaphoreState(semaphore);
2485 semaphore_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002486 semaphoreMap.erase(semaphore);
2487}
2488
2489void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2490 if (!event) return;
2491 EVENT_STATE *event_state = GetEventState(event);
2492 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
2493 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
2494 eventMap.erase(event);
2495}
2496
2497void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2498 const VkAllocationCallbacks *pAllocator) {
2499 if (!queryPool) return;
2500 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
2501 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
2502 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002503 qp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002504 queryPoolMap.erase(queryPool);
2505}
2506
2507// Object with given handle is being bound to memory w/ given mem_info struct.
2508// Track the newly bound memory range with given memoryOffset
2509// Also scan any previous ranges, track aliased ranges with new range, and flag an error if a linear
2510// and non-linear range incorrectly overlap.
locke-lunargd556cc32019-09-17 01:21:23 -06002511void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002512 VkDeviceSize memoryOffset) {
locke-lunargd556cc32019-09-17 01:21:23 -06002513 if (typed_handle.type == kVulkanObjectTypeImage) {
2514 mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
2515 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002516 mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002517 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002518 mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002519 } else {
2520 // Unsupported object type
2521 assert(false);
2522 }
2523}
2524
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002525void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
2526 InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002527}
2528
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002529void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
2530 InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002531}
2532
2533void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002534 VkDeviceSize mem_offset) {
2535 InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002536}
2537
2538// This function will remove the handle-to-index mapping from the appropriate map.
2539static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
2540 if (typed_handle.type == kVulkanObjectTypeImage) {
2541 mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
2542 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002543 mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002544 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002545 mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002546 } else {
2547 // Unsupported object type
2548 assert(false);
2549 }
2550}
2551
2552void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
2553 RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
2554}
2555
2556void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
2557 RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
2558}
2559
2560void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
2561 RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
2562}
2563
2564void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2565 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2566 if (buffer_state) {
2567 // Track bound memory range information
2568 auto mem_info = GetDevMemState(mem);
2569 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002570 InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06002571 }
2572 // Track objects tied to memory
2573 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
2574 }
2575}
2576
2577void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
2578 VkDeviceSize memoryOffset, VkResult result) {
2579 if (VK_SUCCESS != result) return;
2580 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
2581}
2582
2583void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
2584 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2585 for (uint32_t i = 0; i < bindInfoCount; i++) {
2586 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2587 }
2588}
2589
2590void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
2591 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2592 for (uint32_t i = 0; i < bindInfoCount; i++) {
2593 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2594 }
2595}
2596
Spencer Fricke6c127102020-04-16 06:25:20 -07002597void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002598 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2599 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002600 buffer_state->memory_requirements_checked = true;
2601 }
2602}
2603
2604void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
2605 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002606 RecordGetBufferMemoryRequirementsState(buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002607}
2608
2609void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
2610 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
2611 VkMemoryRequirements2KHR *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002612 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002613}
2614
2615void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
2616 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
2617 VkMemoryRequirements2KHR *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002618 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002619}
2620
Spencer Fricke6c127102020-04-16 06:25:20 -07002621void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002622 const VkImagePlaneMemoryRequirementsInfo *plane_info =
2623 (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002624 IMAGE_STATE *image_state = GetImageState(image);
2625 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002626 if (plane_info != nullptr) {
2627 // Multi-plane image
2628 image_state->memory_requirements_checked = false; // Each image plane needs to be checked itself
2629 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
2630 image_state->plane0_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002631 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
2632 image_state->plane1_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002633 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
2634 image_state->plane2_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002635 }
2636 } else {
2637 // Single Plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002638 image_state->memory_requirements_checked = true;
2639 }
locke-lunargd556cc32019-09-17 01:21:23 -06002640 }
2641}
2642
2643void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
2644 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002645 RecordGetImageMemoryRequirementsState(image, nullptr);
locke-lunargd556cc32019-09-17 01:21:23 -06002646}
2647
2648void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2649 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002650 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002651}
2652
2653void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
2654 const VkImageMemoryRequirementsInfo2 *pInfo,
2655 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002656 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002657}
2658
2659static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2660 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2661 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2662 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2663 image_state->sparse_metadata_required = true;
2664 }
2665}
2666
2667void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2668 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2669 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2670 auto image_state = GetImageState(image);
2671 image_state->get_sparse_reqs_called = true;
2672 if (!pSparseMemoryRequirements) return;
2673 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2674 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2675 }
2676}
2677
2678void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
2679 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2680 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2681 auto image_state = GetImageState(pInfo->image);
2682 image_state->get_sparse_reqs_called = true;
2683 if (!pSparseMemoryRequirements) return;
2684 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2685 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2686 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2687 }
2688}
2689
2690void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
2691 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2692 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2693 auto image_state = GetImageState(pInfo->image);
2694 image_state->get_sparse_reqs_called = true;
2695 if (!pSparseMemoryRequirements) return;
2696 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2697 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2698 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2699 }
2700}
2701
2702void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2703 const VkAllocationCallbacks *pAllocator) {
2704 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002705 auto shader_module_state = GetShaderModuleState(shaderModule);
2706 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002707 shaderModuleMap.erase(shaderModule);
2708}
2709
2710void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2711 const VkAllocationCallbacks *pAllocator) {
2712 if (!pipeline) return;
2713 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
2714 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
2715 // Any bound cmd buffers are now invalid
2716 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002717 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002718 pipelineMap.erase(pipeline);
2719}
2720
2721void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2722 const VkAllocationCallbacks *pAllocator) {
2723 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002724 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
2725 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002726 pipelineLayoutMap.erase(pipelineLayout);
2727}
2728
2729void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2730 const VkAllocationCallbacks *pAllocator) {
2731 if (!sampler) return;
2732 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
2733 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
2734 // Any bound cmd buffers are now invalid
2735 if (sampler_state) {
2736 InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
Yuly Novikov424cdd52020-05-26 16:45:12 -04002737
2738 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2739 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
2740 custom_border_color_sampler_count--;
2741 }
2742
2743 sampler_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002744 }
2745 samplerMap.erase(sampler);
2746}
2747
2748void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2749 const VkAllocationCallbacks *pAllocator) {
2750 if (!descriptorSetLayout) return;
2751 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2752 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002753 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002754 descriptorSetLayoutMap.erase(layout_it);
2755 }
2756}
2757
2758void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2759 const VkAllocationCallbacks *pAllocator) {
2760 if (!descriptorPool) return;
2761 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
2762 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
2763 if (desc_pool_state) {
2764 // Any bound cmd buffers are now invalid
2765 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
2766 // Free sets that were in this pool
2767 for (auto ds : desc_pool_state->sets) {
2768 FreeDescriptorSet(ds);
2769 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002770 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002771 descriptorPoolMap.erase(descriptorPool);
2772 }
2773}
2774
// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
// Teardown order matters here: the derived-class callback runs first (while the
// state is still fully intact), then the state is reset, unlinked from its pool,
// stripped of debug labels, and finally erased from the tracker's map.
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        // Allow any derived class to clean up command buffer state
        if (command_buffer_free_callback) {
            (*command_buffer_free_callback)(command_buffers[i]);
        }

        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}
2800
2801void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2802 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
2803 auto pPool = GetCommandPoolState(commandPool);
2804 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
2805}
2806
2807void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2808 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2809 VkResult result) {
2810 if (VK_SUCCESS != result) return;
sfricke-samsungc1543372020-08-18 22:37:27 -07002811 VkCommandPool command_pool = *pCommandPool;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002812 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
sfricke-samsungc1543372020-08-18 22:37:27 -07002813 cmd_pool_state->commandPool = command_pool;
locke-lunargd556cc32019-09-17 01:21:23 -06002814 cmd_pool_state->createFlags = pCreateInfo->flags;
2815 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07002816 cmd_pool_state->unprotected = ((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) == 0);
sfricke-samsungc1543372020-08-18 22:37:27 -07002817 commandPoolMap[command_pool] = std::move(cmd_pool_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002818}
2819
2820void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2821 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2822 VkResult result) {
2823 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002824 auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002825 query_pool_state->createInfo = *pCreateInfo;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002826 query_pool_state->pool = *pQueryPool;
2827 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
2828 const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
Mark Lobodzinski7e948e42020-09-09 10:23:36 -06002829 query_pool_state->perf_counter_index_count = perf->counterIndexCount;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002830
Mark Lobodzinski7e948e42020-09-09 10:23:36 -06002831 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002832 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2833 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2834 switch (counter.scope) {
2835 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
2836 query_pool_state->has_perf_scope_command_buffer = true;
2837 break;
2838 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
2839 query_pool_state->has_perf_scope_render_pass = true;
2840 break;
2841 default:
2842 break;
2843 }
2844 }
2845
2846 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
2847 &query_pool_state->n_performance_passes);
2848 }
2849
locke-lunargd556cc32019-09-17 01:21:23 -06002850 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
2851
2852 QueryObject query_obj{*pQueryPool, 0u};
2853 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2854 query_obj.query = i;
2855 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2856 }
2857}
2858
2859void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2860 const VkAllocationCallbacks *pAllocator) {
2861 if (!commandPool) return;
2862 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2863 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2864 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2865 if (cp_state) {
2866 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2867 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2868 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002869 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002870 commandPoolMap.erase(commandPool);
2871 }
2872}
2873
2874void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2875 VkCommandPoolResetFlags flags, VkResult result) {
2876 if (VK_SUCCESS != result) return;
2877 // Reset all of the CBs allocated from this pool
2878 auto command_pool_state = GetCommandPoolState(commandPool);
2879 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2880 ResetCommandBufferState(cmdBuffer);
2881 }
2882}
2883
2884void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2885 VkResult result) {
2886 for (uint32_t i = 0; i < fenceCount; ++i) {
2887 auto pFence = GetFenceState(pFences[i]);
2888 if (pFence) {
2889 if (pFence->scope == kSyncScopeInternal) {
2890 pFence->state = FENCE_UNSIGNALED;
2891 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2892 pFence->scope = kSyncScopeInternal;
2893 }
2894 }
2895 }
2896}
2897
// For given cb_nodes, invalidate them and track object causing invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
// can also unlink objects from command buffers.
void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        // A buffer still recording may yet be completed ("incomplete"); one already
        // recorded becomes invalid for submission ("complete").
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        // Remember which handle caused the invalidation for later error reporting.
        cb_node->broken_bindings.push_back(obj);

        // if secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            // The mapped value is the index of obj within this command buffer's
            // object_bindings; blank the slot (rather than erase) so other recorded
            // indices remain valid.
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        cb_nodes.clear();
    }
}
2927
2928void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
2929 const VulkanTypedHandle &obj) {
locke-lunargd556cc32019-09-17 01:21:23 -06002930 for (auto cb_node : cb_nodes) {
2931 if (cb_node->state == CB_RECORDING) {
2932 cb_node->state = CB_INVALID_INCOMPLETE;
2933 } else if (cb_node->state == CB_RECORDED) {
2934 cb_node->state = CB_INVALID_COMPLETE;
2935 }
2936 cb_node->broken_bindings.push_back(obj);
2937
2938 // if secondary, then propagate the invalidation to the primaries that will call us.
2939 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05002940 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06002941 }
2942 }
2943}
2944
2945void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2946 const VkAllocationCallbacks *pAllocator) {
2947 if (!framebuffer) return;
2948 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2949 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2950 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002951 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002952 frameBufferMap.erase(framebuffer);
2953}
2954
2955void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2956 const VkAllocationCallbacks *pAllocator) {
2957 if (!renderPass) return;
2958 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2959 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2960 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002961 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002962 renderPassMap.erase(renderPass);
2963}
2964
2965void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2966 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2967 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002968 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002969 fence_state->fence = *pFence;
2970 fence_state->createInfo = *pCreateInfo;
2971 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2972 fenceMap[*pFence] = std::move(fence_state);
2973}
2974
2975bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2976 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2977 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002978 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002979 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2980 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2981 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2982 cgpl_state->pipe_state.reserve(count);
2983 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002984 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002985 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002986 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002987 }
2988 return false;
2989}
2990
2991void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2992 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2993 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2994 VkResult result, void *cgpl_state_data) {
2995 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2996 // This API may create pipelines regardless of the return value
2997 for (uint32_t i = 0; i < count; i++) {
2998 if (pPipelines[i] != VK_NULL_HANDLE) {
2999 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3000 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
3001 }
3002 }
3003 cgpl_state->pipe_state.clear();
3004}
3005
3006bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3007 const VkComputePipelineCreateInfo *pCreateInfos,
3008 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003009 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003010 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
3011 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
3012 ccpl_state->pipe_state.reserve(count);
3013 for (uint32_t i = 0; i < count; i++) {
3014 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003015 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06003016 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003017 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003018 }
3019 return false;
3020}
3021
3022void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3023 const VkComputePipelineCreateInfo *pCreateInfos,
3024 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
3025 VkResult result, void *ccpl_state_data) {
3026 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
3027
3028 // This API may create pipelines regardless of the return value
3029 for (uint32_t i = 0; i < count; i++) {
3030 if (pPipelines[i] != VK_NULL_HANDLE) {
3031 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3032 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
3033 }
3034 }
3035 ccpl_state->pipe_state.clear();
3036}
3037
3038bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
3039 uint32_t count,
3040 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
3041 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003042 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003043 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
3044 crtpl_state->pipe_state.reserve(count);
3045 for (uint32_t i = 0; i < count; i++) {
3046 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003047 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003048 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003049 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003050 }
3051 return false;
3052}
3053
3054void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
3055 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
3056 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
3057 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
3058 // This API may create pipelines regardless of the return value
3059 for (uint32_t i = 0; i < count; i++) {
3060 if (pPipelines[i] != VK_NULL_HANDLE) {
3061 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3062 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3063 }
3064 }
3065 crtpl_state->pipe_state.clear();
3066}
3067
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003068bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache,
3069 uint32_t count,
3070 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3071 const VkAllocationCallbacks *pAllocator,
3072 VkPipeline *pPipelines, void *crtpl_state_data) const {
3073 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3074 crtpl_state->pipe_state.reserve(count);
3075 for (uint32_t i = 0; i < count; i++) {
3076 // Create and initialize internal tracking data structure
3077 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
3078 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
3079 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
3080 }
3081 return false;
3082}
3083
3084void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
3085 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3086 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
3087 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3088 // This API may create pipelines regardless of the return value
3089 for (uint32_t i = 0; i < count; i++) {
3090 if (pPipelines[i] != VK_NULL_HANDLE) {
3091 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3092 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3093 }
3094 }
3095 crtpl_state->pipe_state.clear();
3096}
3097
locke-lunargd556cc32019-09-17 01:21:23 -06003098void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
3099 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
3100 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003101 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Tony-LunarG7337b312020-04-15 16:40:25 -06003102 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT || pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT)
3103 custom_border_color_sampler_count++;
locke-lunargd556cc32019-09-17 01:21:23 -06003104}
3105
3106void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
3107 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
3108 const VkAllocationCallbacks *pAllocator,
3109 VkDescriptorSetLayout *pSetLayout, VkResult result) {
3110 if (VK_SUCCESS != result) return;
3111 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
3112}
3113
3114// For repeatable sorting, not very useful for "memory in range" search
3115struct PushConstantRangeCompare {
3116 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
3117 if (lhs->offset == rhs->offset) {
3118 if (lhs->size == rhs->size) {
3119 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
3120 return lhs->stageFlags < rhs->stageFlags;
3121 }
3122 // If the offsets are the same then sorting by the end of range is useful for validation
3123 return lhs->size < rhs->size;
3124 }
3125 return lhs->offset < rhs->offset;
3126 }
3127};
3128
// Interning dictionary: equivalent sets of push-constant ranges share one canonical id.
static PushConstantRangesDict push_constant_ranges_dict;
3130
3131PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
3132 if (!info->pPushConstantRanges) {
3133 // Hand back the empty entry (creating as needed)...
3134 return push_constant_ranges_dict.look_up(PushConstantRanges());
3135 }
3136
3137 // Sort the input ranges to ensure equivalent ranges map to the same id
3138 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
3139 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
3140 sorted.insert(info->pPushConstantRanges + i);
3141 }
3142
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07003143 PushConstantRanges ranges;
3144 ranges.reserve(sorted.size());
locke-lunargd556cc32019-09-17 01:21:23 -06003145 for (const auto range : sorted) {
3146 ranges.emplace_back(*range);
3147 }
3148 return push_constant_ranges_dict.look_up(std::move(ranges));
3149}
3150
// Dictionary of canonical form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;
3156
3157static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
3158 const PipelineLayoutSetLayoutsId set_layouts_id) {
3159 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
3160}
3161
void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkPipelineLayout *pPipelineLayout, VkResult result) {
    if (VK_SUCCESS != result) return;

    auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
    pipeline_layout_state->layout = *pPipelineLayout;
    pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
    PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        // Hold shared ownership of each descriptor set layout and collect its canonical id.
        pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
        set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
    }

    // Get canonical form IDs for the "compatible for set" contents
    pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
    auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
    pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);

    // Create table of "compatible for set N" canonical forms for trivial accept validation
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->compat_for_set.emplace_back(
            GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
    }
    pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
}
3188
3189void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
3190 const VkAllocationCallbacks *pAllocator,
3191 VkDescriptorPool *pDescriptorPool, VkResult result) {
3192 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003193 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003194}
3195
3196void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3197 VkDescriptorPoolResetFlags flags, VkResult result) {
3198 if (VK_SUCCESS != result) return;
3199 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
3200 // TODO: validate flags
3201 // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
3202 for (auto ds : pPool->sets) {
3203 FreeDescriptorSet(ds);
3204 }
3205 pPool->sets.clear();
3206 // Reset available count for each type and available sets for this pool
3207 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
3208 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
3209 }
3210 pPool->availableSets = pPool->maxSets;
3211}
3212
3213bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
3214 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003215 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003216 // Always update common data
3217 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3218 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3219 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
3220
3221 return false;
3222}
3223
3224// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
3225void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3226 VkDescriptorSet *pDescriptorSets, VkResult result,
3227 void *ads_state_data) {
3228 if (VK_SUCCESS != result) return;
3229 // All the updates are contained in a single cvdescriptorset function
3230 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3231 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3232 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
3233}
3234
3235void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
3236 const VkDescriptorSet *pDescriptorSets) {
3237 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
3238 // Update available descriptor sets in pool
3239 pool_state->availableSets += count;
3240
3241 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
3242 for (uint32_t i = 0; i < count; ++i) {
3243 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
3244 auto descriptor_set = setMap[pDescriptorSets[i]].get();
3245 uint32_t type_index = 0, descriptor_count = 0;
3246 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
3247 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
3248 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
3249 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
3250 }
3251 FreeDescriptorSet(descriptor_set);
3252 pool_state->sets.erase(descriptor_set);
3253 }
3254 }
3255}
3256
// Thin pass-through: all write/copy bookkeeping lives in cvdescriptorset::PerformUpdateDescriptorSets.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}
3264
3265void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
3266 VkCommandBuffer *pCommandBuffer, VkResult result) {
3267 if (VK_SUCCESS != result) return;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003268 auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
locke-lunargd556cc32019-09-17 01:21:23 -06003269 if (pPool) {
3270 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
3271 // Add command buffer to its commandPool map
3272 pPool->commandBuffers.insert(pCommandBuffer[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003273 auto pCB = std::make_shared<CMD_BUFFER_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003274 pCB->createInfo = *pCreateInfo;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003275 pCB->command_pool = pPool;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07003276 pCB->unprotected = pPool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06003277 // Add command buffer to map
3278 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
3279 ResetCommandBufferState(pCommandBuffer[i]);
3280 }
3281 }
3282}
3283
3284// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
3285void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003286 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003287 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07003288 // If imageless fb, skip fb binding
3289 if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003290 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
3291 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003292 auto view_state = GetAttachmentImageViewState(cb_state, fb_state, attachment);
locke-lunargd556cc32019-09-17 01:21:23 -06003293 if (view_state) {
3294 AddCommandBufferBindingImageView(cb_state, view_state);
3295 }
3296 }
3297}
3298
void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary Command Buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (pInfo) {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, framebuffer);
                }
            }
        }
    }
    // Beginning an already-recorded (or invalidated-after-recording) command buffer
    // performs an implicit reset, so mirror that before applying the new begin state.
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo) {
        // Deep-copy the inheritance info and repoint beginInfo at our copy so the
        // tracked state stays valid after the caller's struct goes out of scope.
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command-buffer and inheriting. Update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
            if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
                cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
                if (cb_state->activeFramebuffer) cb_state->framebuffers.insert(cb_state->activeFramebuffer);
            }
        }
    }

    // Honor a chained VkDeviceGroupCommandBufferBeginInfo; otherwise default to all devices.
    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    cb_state->performance_lock_acquired = performance_lock_acquired;
}
3347
3348void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
3349 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3350 if (!cb_state) return;
3351 // Cached validation is specific to a specific recording of a specific command buffer.
3352 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
3353 descriptor_set->ClearCachedValidation(cb_state);
3354 }
3355 cb_state->validated_descriptor_sets.clear();
3356 if (VK_SUCCESS == result) {
3357 cb_state->state = CB_RECORDED;
3358 }
3359}
3360
3361void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
3362 VkResult result) {
3363 if (VK_SUCCESS == result) {
3364 ResetCommandBufferState(commandBuffer);
3365 }
3366}
3367
3368CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
3369 // initially assume everything is static state
3370 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
3371
3372 if (ds) {
3373 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
3374 switch (ds->pDynamicStates[i]) {
3375 case VK_DYNAMIC_STATE_LINE_WIDTH:
3376 flags &= ~CBSTATUS_LINE_WIDTH_SET;
3377 break;
3378 case VK_DYNAMIC_STATE_DEPTH_BIAS:
3379 flags &= ~CBSTATUS_DEPTH_BIAS_SET;
3380 break;
3381 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
3382 flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
3383 break;
3384 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
3385 flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
3386 break;
3387 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
3388 flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
3389 break;
3390 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
3391 flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
3392 break;
3393 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
3394 flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
3395 break;
3396 case VK_DYNAMIC_STATE_SCISSOR:
3397 flags &= ~CBSTATUS_SCISSOR_SET;
3398 break;
3399 case VK_DYNAMIC_STATE_VIEWPORT:
3400 flags &= ~CBSTATUS_VIEWPORT_SET;
3401 break;
3402 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
3403 flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
3404 break;
3405 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
3406 flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
3407 break;
3408 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
3409 flags &= ~CBSTATUS_LINE_STIPPLE_SET;
3410 break;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003411 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
3412 flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
3413 break;
Piers Daniell39842ee2020-07-10 16:42:33 -06003414 case VK_DYNAMIC_STATE_CULL_MODE_EXT:
3415 flags &= ~CBSTATUS_CULL_MODE_SET;
3416 break;
3417 case VK_DYNAMIC_STATE_FRONT_FACE_EXT:
3418 flags &= ~CBSTATUS_FRONT_FACE_SET;
3419 break;
3420 case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT:
3421 flags &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
3422 break;
3423 case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
3424 flags &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
3425 break;
3426 case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT:
3427 flags &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
3428 break;
3429 case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT:
3430 flags &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
3431 break;
3432 case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT:
3433 flags &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
3434 break;
3435 case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT:
3436 flags &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
3437 break;
3438 case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT:
3439 flags &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
3440 break;
3441 case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT:
3442 flags &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
3443 break;
3444 case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT:
3445 flags &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
3446 break;
3447 case VK_DYNAMIC_STATE_STENCIL_OP_EXT:
3448 flags &= ~CBSTATUS_STENCIL_OP_SET;
3449 break;
locke-lunargd556cc32019-09-17 01:21:23 -06003450 default:
3451 break;
3452 }
3453 }
3454 }
3455
3456 return flags;
3457}
3458
3459// Validation cache:
3460// CV is the bottommost implementor of this extension. Don't pass calls down.
3461// utility function to set collective state for pipeline
3462void SetPipelineState(PIPELINE_STATE *pPipe) {
3463 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
3464 if (pPipe->graphicsPipelineCI.pColorBlendState) {
3465 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
3466 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
3467 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3468 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3469 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3470 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3471 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3472 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3473 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3474 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
3475 pPipe->blendConstantsEnabled = true;
3476 }
3477 }
3478 }
3479 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07003480 // Check if sample location is enabled
3481 if (pPipe->graphicsPipelineCI.pMultisampleState) {
3482 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
3483 lvl_find_in_chain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
3484 if (sample_location_state != nullptr) {
3485 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
3486 }
3487 }
locke-lunargd556cc32019-09-17 01:21:23 -06003488}
3489
3490void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
3491 VkPipeline pipeline) {
3492 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3493 assert(cb_state);
3494
3495 auto pipe_state = GetPipelineState(pipeline);
3496 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
3497 cb_state->status &= ~cb_state->static_status;
3498 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
3499 cb_state->status |= cb_state->static_status;
3500 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003501 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003502 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
3503 SetPipelineState(pipe_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003504 AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003505}
3506
3507void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3508 uint32_t viewportCount, const VkViewport *pViewports) {
3509 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3510 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
3511 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003512 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003513}
3514
3515void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3516 uint32_t exclusiveScissorCount,
3517 const VkRect2D *pExclusiveScissors) {
3518 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3519 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3520 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3521 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003522 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003523}
3524
3525void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3526 VkImageLayout imageLayout) {
3527 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3528
3529 if (imageView != VK_NULL_HANDLE) {
3530 auto view_state = GetImageViewState(imageView);
3531 AddCommandBufferBindingImageView(cb_state, view_state);
3532 }
3533}
3534
3535void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3536 uint32_t viewportCount,
3537 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3538 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3539 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3540 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3541 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003542 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003543}
3544
3545void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3546 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3547 const VkAllocationCallbacks *pAllocator,
3548 VkAccelerationStructureNV *pAccelerationStructure,
3549 VkResult result) {
3550 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003551 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003552
3553 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
3554 VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
3555 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3556 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
3557 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3558 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3559
3560 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
3561 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3562 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
3563 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3564 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3565 &as_state->build_scratch_memory_requirements);
3566
3567 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
3568 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3569 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
3570 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3571 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3572 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003573 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003574 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3575}
3576
Jeff Bolz95176d02020-04-01 00:36:16 -05003577void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3578 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3579 const VkAllocationCallbacks *pAllocator,
3580 VkAccelerationStructureKHR *pAccelerationStructure,
3581 VkResult result) {
3582 if (VK_SUCCESS != result) return;
3583 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
3584
3585 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
3586 VkAccelerationStructureMemoryRequirementsInfoKHR as_memory_requirements_info = {};
3587 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3588 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR;
3589 as_memory_requirements_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3590 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3591 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &as_memory_requirements_info, &as_state->memory_requirements);
3592
3593 VkAccelerationStructureMemoryRequirementsInfoKHR scratch_memory_req_info = {};
3594 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3595 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR;
3596 scratch_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3597 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3598 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &scratch_memory_req_info,
3599 &as_state->build_scratch_memory_requirements);
3600
3601 VkAccelerationStructureMemoryRequirementsInfoKHR update_memory_req_info = {};
3602 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3603 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR;
3604 update_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3605 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3606 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &update_memory_req_info,
3607 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003608 as_state->allocator = pAllocator;
Jeff Bolz95176d02020-04-01 00:36:16 -05003609 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3610}
3611
locke-lunargd556cc32019-09-17 01:21:23 -06003612void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
3613 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
3614 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
3615 if (as_state != nullptr) {
3616 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3617 as_state->memory_requirements = *pMemoryRequirements;
3618 as_state->memory_requirements_checked = true;
3619 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3620 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3621 as_state->build_scratch_memory_requirements_checked = true;
3622 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3623 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3624 as_state->update_scratch_memory_requirements_checked = true;
3625 }
3626 }
3627}
3628
Jeff Bolz95176d02020-04-01 00:36:16 -05003629void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryCommon(
3630 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result,
3631 bool isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003632 if (VK_SUCCESS != result) return;
3633 for (uint32_t i = 0; i < bindInfoCount; i++) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003634 const VkBindAccelerationStructureMemoryInfoKHR &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003635
3636 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
3637 if (as_state) {
3638 // Track bound memory range information
3639 auto mem_info = GetDevMemState(info.memory);
3640 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07003641 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06003642 }
3643 // Track objects tied to memory
3644 SetMemBinding(info.memory, as_state, info.memoryOffset,
Jeff Bolz95176d02020-04-01 00:36:16 -05003645 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR));
locke-lunargd556cc32019-09-17 01:21:23 -06003646
3647 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003648 // XXX TODO: Query device address for KHR extension
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003649 if (enabled[gpu_validation] && isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003650 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3651 }
3652 }
3653 }
3654}
3655
Jeff Bolz95176d02020-04-01 00:36:16 -05003656void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3657 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
3658 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, true);
3659}
3660
3661void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryKHR(
3662 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result) {
3663 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, false);
3664}
3665
locke-lunargd556cc32019-09-17 01:21:23 -06003666void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3667 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3668 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3669 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3670 if (cb_state == nullptr) {
3671 return;
3672 }
3673
3674 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3675 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3676 if (dst_as_state != nullptr) {
3677 dst_as_state->built = true;
3678 dst_as_state->build_info.initialize(pInfo);
3679 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3680 }
3681 if (src_as_state != nullptr) {
3682 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3683 }
3684 cb_state->hasBuildAccelerationStructureCmd = true;
3685}
3686
3687void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3688 VkAccelerationStructureNV dst,
3689 VkAccelerationStructureNV src,
3690 VkCopyAccelerationStructureModeNV mode) {
3691 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3692 if (cb_state) {
3693 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3694 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3695 if (dst_as_state != nullptr && src_as_state != nullptr) {
3696 dst_as_state->built = true;
3697 dst_as_state->build_info = src_as_state->build_info;
3698 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3699 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3700 }
3701 }
3702}
3703
Jeff Bolz95176d02020-04-01 00:36:16 -05003704void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3705 VkAccelerationStructureKHR accelerationStructure,
3706 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003707 if (!accelerationStructure) return;
3708 auto *as_state = GetAccelerationStructureState(accelerationStructure);
3709 if (as_state) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003710 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06003711 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
3712 for (auto mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07003713 RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -06003714 }
3715 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003716 as_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003717 accelerationStructureMap.erase(accelerationStructure);
3718 }
3719}
3720
Jeff Bolz95176d02020-04-01 00:36:16 -05003721void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3722 VkAccelerationStructureNV accelerationStructure,
3723 const VkAllocationCallbacks *pAllocator) {
3724 PreCallRecordDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator);
3725}
3726
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003727void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3728 uint32_t viewportCount,
3729 const VkViewportWScalingNV *pViewportWScalings) {
3730 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3731 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003732 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003733}
3734
locke-lunargd556cc32019-09-17 01:21:23 -06003735void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3736 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3737 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003738 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003739}
3740
3741void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3742 uint16_t lineStipplePattern) {
3743 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3744 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003745 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003746}
3747
3748void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3749 float depthBiasClamp, float depthBiasSlopeFactor) {
3750 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3751 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003752 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003753}
3754
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003755void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3756 const VkRect2D *pScissors) {
3757 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3758 cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
3759 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003760 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003761}
3762
locke-lunargd556cc32019-09-17 01:21:23 -06003763void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3764 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3765 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003766 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003767}
3768
3769void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3770 float maxDepthBounds) {
3771 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3772 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003773 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003774}
3775
3776void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3777 uint32_t compareMask) {
3778 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3779 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003780 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003781}
3782
3783void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3784 uint32_t writeMask) {
3785 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3786 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003787 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003788}
3789
3790void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3791 uint32_t reference) {
3792 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3793 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003794 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003795}
3796
3797// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
3798// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
3799// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    // Exactly one of pDescriptorSets (CmdBindDescriptorSets) or push_descriptor_set
    // (CmdPushDescriptorSet) must be provided.
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else.
    // If `ds` is the currently bound push descriptor set, release it and report true.
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets below first_set, invalidate them if their
    // compatibility id no longer matches the incoming pipeline layout's id for
    // that set index ("Pipeline Layout Compatibility" rules).
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                // Consume this set's slice of the flat dynamic-offset array.
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
3890
3891// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3892void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3893 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3894 uint32_t firstSet, uint32_t setCount,
3895 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3896 const uint32_t *pDynamicOffsets) {
3897 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3898 auto pipeline_layout = GetPipelineLayout(layout);
3899
3900 // Resize binding arrays
3901 uint32_t last_set_index = firstSet + setCount - 1;
3902 if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
3903 cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
3904 }
3905
3906 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3907 dynamicOffsetCount, pDynamicOffsets);
3908 cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
3909 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3910}
3911
// Record state for CmdPushDescriptorSet: ensure a push descriptor set object is
// bound for `set`, then apply the write updates to it.
void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates: unknown layout, out-of-range set index, or a
    // set layout that isn't a push-descriptor layout.
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set (none bound, or the bound
    // one is layout-incompatible at this set index), replace it with a fresh set.
    // NOTE(review): the raw `new` appears to hand ownership to
    // UnbindAndResetPushDescriptorSet (push_descriptor_set is a smart pointer) — confirm.
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    // Bind the push descriptor set at `set` (no dynamic offsets for push descriptors).
    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}
3937
3938void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3939 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3940 uint32_t set, uint32_t descriptorWriteCount,
3941 const VkWriteDescriptorSet *pDescriptorWrites) {
3942 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3943 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3944}
3945
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003946void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3947 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3948 const void *pValues) {
3949 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3950 if (cb_state != nullptr) {
3951 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3952
3953 auto &push_constant_data = cb_state->push_constant_data;
3954 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3955 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
3956 }
3957}
3958
locke-lunargd556cc32019-09-17 01:21:23 -06003959void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3960 VkIndexType indexType) {
3961 auto buffer_state = GetBufferState(buffer);
3962 auto cb_state = GetCBState(commandBuffer);
3963
3964 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
Piers Daniell39842ee2020-07-10 16:42:33 -06003965 cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
locke-lunargd556cc32019-09-17 01:21:23 -06003966 cb_state->index_buffer_binding.buffer = buffer;
3967 cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
3968 cb_state->index_buffer_binding.offset = offset;
3969 cb_state->index_buffer_binding.index_type = indexType;
3970 // Add binding for this index buffer to this commandbuffer
3971 AddCommandBufferBindingBuffer(cb_state, buffer_state);
3972}
3973
3974void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3975 uint32_t bindingCount, const VkBuffer *pBuffers,
3976 const VkDeviceSize *pOffsets) {
3977 auto cb_state = GetCBState(commandBuffer);
3978
3979 uint32_t end = firstBinding + bindingCount;
3980 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3981 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3982 }
3983
3984 for (uint32_t i = 0; i < bindingCount; ++i) {
3985 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
3986 vertex_buffer_binding.buffer = pBuffers[i];
3987 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06003988 vertex_buffer_binding.size = VK_WHOLE_SIZE;
3989 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06003990 // Add binding for this vertex buffer to this commandbuffer
Jeff Bolz165818a2020-05-08 11:19:03 -05003991 if (pBuffers[i]) {
3992 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
3993 }
locke-lunargd556cc32019-09-17 01:21:23 -06003994 }
3995}
3996
3997void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3998 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
3999 auto cb_state = GetCBState(commandBuffer);
4000 auto dst_buffer_state = GetBufferState(dstBuffer);
4001
4002 // Update bindings between buffer and cmd buffer
4003 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
4004}
4005
Jeff Bolz310775c2019-10-09 00:46:33 -05004006bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
4007 EventToStageMap *localEventToStageMap) {
4008 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06004009 return false;
4010}
4011
4012void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
4013 VkPipelineStageFlags stageMask) {
4014 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4015 auto event_state = GetEventState(event);
4016 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05004017 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004018 }
4019 cb_state->events.push_back(event);
4020 if (!cb_state->waitedEvents.count(event)) {
4021 cb_state->writeEventsBeforeWait.push_back(event);
4022 }
Jeff Bolz310775c2019-10-09 00:46:33 -05004023 cb_state->eventUpdates.emplace_back(
4024 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
4025 return SetEventStageMask(event, stageMask, localEventToStageMap);
4026 });
locke-lunargd556cc32019-09-17 01:21:23 -06004027}
4028
4029void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
4030 VkPipelineStageFlags stageMask) {
4031 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4032 auto event_state = GetEventState(event);
4033 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05004034 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004035 }
4036 cb_state->events.push_back(event);
4037 if (!cb_state->waitedEvents.count(event)) {
4038 cb_state->writeEventsBeforeWait.push_back(event);
4039 }
4040
4041 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05004042 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
4043 return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
4044 });
locke-lunargd556cc32019-09-17 01:21:23 -06004045}
4046
4047void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
4048 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
4049 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
4050 uint32_t bufferMemoryBarrierCount,
4051 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
4052 uint32_t imageMemoryBarrierCount,
4053 const VkImageMemoryBarrier *pImageMemoryBarriers) {
4054 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4055 for (uint32_t i = 0; i < eventCount; ++i) {
4056 auto event_state = GetEventState(pEvents[i]);
4057 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05004058 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
4059 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004060 }
4061 cb_state->waitedEvents.insert(pEvents[i]);
4062 cb_state->events.push_back(pEvents[i]);
4063 }
4064}
4065
Jeff Bolz310775c2019-10-09 00:46:33 -05004066bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
4067 (*localQueryToStateMap)[object] = value;
4068 return false;
4069}
4070
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004071bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
4072 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05004073 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004074 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05004075 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06004076 }
4077 return false;
4078}
4079
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004080QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
4081 uint32_t perfPass) const {
4082 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06004083
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004084 auto iter = localQueryToStateMap->find(query);
4085 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05004086
Jeff Bolz310775c2019-10-09 00:46:33 -05004087 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06004088}
4089
4090void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004091 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004092 cb_state->activeQueries.insert(query_obj);
4093 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004094 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
4095 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4096 QueryMap *localQueryToStateMap) {
4097 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
4098 return false;
4099 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004100 auto pool_state = GetQueryPoolState(query_obj.pool);
4101 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
4102 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004103}
4104
4105void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
4106 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004107 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004108 QueryObject query = {queryPool, slot};
4109 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4110 RecordCmdBeginQuery(cb_state, query);
4111}
4112
4113void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004114 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004115 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004116 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
4117 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4118 QueryMap *localQueryToStateMap) {
4119 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
4120 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004121 auto pool_state = GetQueryPoolState(query_obj.pool);
4122 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
4123 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004124}
4125
4126void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004127 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004128 QueryObject query_obj = {queryPool, slot};
4129 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4130 RecordCmdEndQuery(cb_state, query_obj);
4131}
4132
4133void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4134 uint32_t firstQuery, uint32_t queryCount) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004135 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004136 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4137
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02004138 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
4139 QueryObject query = {queryPool, slot};
4140 cb_state->resetQueries.insert(query);
4141 }
4142
Jeff Bolz310775c2019-10-09 00:46:33 -05004143 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004144 bool do_validate, VkQueryPool &firstPerfQueryPool,
4145 uint32_t perfQueryPass,
4146 QueryMap *localQueryToStateMap) {
4147 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06004148 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004149 auto pool_state = GetQueryPoolState(queryPool);
4150 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004151 cb_state);
4152}
4153
4154void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4155 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
4156 VkDeviceSize dstOffset, VkDeviceSize stride,
4157 VkQueryResultFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004158 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004159 auto cb_state = GetCBState(commandBuffer);
4160 auto dst_buff_state = GetBufferState(dstBuffer);
4161 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05004162 auto pool_state = GetQueryPoolState(queryPool);
4163 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004164 cb_state);
4165}
4166
4167void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
4168 VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004169 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004170 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeff Bolzadbfa852019-10-04 13:53:30 -05004171 auto pool_state = GetQueryPoolState(queryPool);
4172 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004173 cb_state);
4174 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004175 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
4176 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4177 QueryMap *localQueryToStateMap) {
4178 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
4179 });
locke-lunargd556cc32019-09-17 01:21:23 -06004180}
4181
4182void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
4183 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
4184 VkResult result) {
4185 if (VK_SUCCESS != result) return;
4186 // Shadow create info and store in map
Jeff Bolz6ae39612019-10-11 20:57:36 -05004187 auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));
locke-lunargd556cc32019-09-17 01:21:23 -06004188
4189 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
4190 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
4191 VkImageView view = pCreateInfo->pAttachments[i];
4192 auto view_state = GetImageViewState(view);
4193 if (!view_state) {
4194 continue;
4195 }
4196 }
4197 }
4198 frameBufferMap[*pFramebuffer] = std::move(fb_state);
4199}
4200
// Build the render pass dependency graph (DAG) from the explicit VkSubpassDependency entries:
//  - subpassToNode: per-subpass prev/next adjacency lists (subpass indices)
//  - self_dependencies: per-subpass list of dependency indices where src == dst
//  - subpass_dependencies: per-subpass barrier lists (from/to external, plus prev/next maps keyed
//    by the other subpass's node) and the set of earlier subpasses that may run asynchronously
// NOTE(review): the rp_version parameter is currently unused in this function — confirm whether
// version-specific handling was intended here.
void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);
    auto &subpass_dependencies = render_pass->subpass_dependencies;
    subpass_dependencies.resize(pCreateInfo->subpassCount);

    // Initialize each node with its own subpass index
    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
        subpass_dependencies[i].pass = i;
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const auto &dependency = pCreateInfo->pDependencies[i];
        const auto srcSubpass = dependency.srcSubpass;
        const auto dstSubpass = dependency.dstSubpass;
        // Internal (subpass-to-subpass) dependencies populate the adjacency lists
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
        // All dependencies (external and internal) populate the barrier bookkeeping
        if (srcSubpass == VK_SUBPASS_EXTERNAL) {
            assert(dstSubpass != VK_SUBPASS_EXTERNAL);  // this is invalid per VUID-VkSubpassDependency-srcSubpass-00865
            subpass_dependencies[dstSubpass].barrier_from_external.emplace_back(&dependency);
        } else if (dstSubpass == VK_SUBPASS_EXTERNAL) {
            subpass_dependencies[srcSubpass].barrier_to_external.emplace_back(&dependency);
        } else if (dependency.srcSubpass != dependency.dstSubpass) {
            // ignore self dependencies in prev and next
            subpass_dependencies[srcSubpass].next[&subpass_dependencies[dstSubpass]].emplace_back(&dependency);
            subpass_dependencies[dstSubpass].prev[&subpass_dependencies[srcSubpass]].emplace_back(&dependency);
        }
    }

    // Determine "asynchronous" subpasses: for each subpass, any *earlier* subpass it does not
    // transitively depend on may execute concurrently with it (synchronization only needs to
    // look backwards, so only earlier passes are considered).
    // NOTE: This is O(N^3), which we could shrink to O(N^2logN) using sets instead of arrays, but given that N is likely to be
    // small and the K for |= from the prev is much less than for set, we'll accept the brute force.
    std::vector<std::vector<bool>> pass_depends(pCreateInfo->subpassCount);
    for (uint32_t i = 1; i < pCreateInfo->subpassCount; ++i) {
        auto &depends = pass_depends[i];
        depends.resize(i);
        auto &subpass_dep = subpass_dependencies[i];
        // Union the transitive dependency sets of all direct predecessors
        for (const auto &prev : subpass_dep.prev) {
            const auto prev_pass = prev.first->pass;
            const auto &prev_depends = pass_depends[prev_pass];
            for (uint32_t j = 0; j < prev_pass; j++) {
                depends[j] = depends[j] | prev_depends[j];
            }
            depends[prev_pass] = true;
        }
        // Any earlier pass not in the transitive dependency set is asynchronous w.r.t. this one
        for (uint32_t pass = 0; pass < subpass_dep.pass; pass++) {
            if (!depends[pass]) {
                subpass_dep.async.push_back(pass);
            }
        }
    }
}
4264
John Zulauf4aff5d92020-02-21 08:29:35 -07004265static VkSubpassDependency2 ImplicitDependencyFromExternal(uint32_t subpass) {
4266 VkSubpassDependency2 from_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4267 nullptr,
4268 VK_SUBPASS_EXTERNAL,
4269 subpass,
4270 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
4271 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4272 0,
4273 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4274 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4275 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4276 0,
4277 0};
4278 return from_external;
4279}
4280
4281static VkSubpassDependency2 ImplicitDependencyToExternal(uint32_t subpass) {
4282 VkSubpassDependency2 to_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4283 nullptr,
4284 subpass,
4285 VK_SUBPASS_EXTERNAL,
4286 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4287 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
4288 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4289 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4290 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4291 0,
4292 0,
4293 0};
4294 return to_external;
4295}
4296
// Finish construction of a RENDER_PASS_STATE (shared by the V1 and V2 create paths) and register
// it in renderPassMap: builds the subpass dependency DAG, computes per-attachment first/last use
// and the layout transitions each subpass must perform, and synthesizes the spec-defined implicit
// external dependencies for attachments that lack explicit ones.
// NOTE(review): RecordRenderPassDAG is always called with RENDER_PASS_VERSION_1 even though
// rp_version is available — confirm this is intentional (DAG recording may be version-agnostic).
void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
                                                         VkRenderPass *pRenderPass) {
    render_pass->renderPass = *pRenderPass;
    auto create_info = render_pass->createInfo.ptr();

    RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());

    // Walks every attachment reference of every subpass (in subpass order) to fill in the
    // render pass state's first/last-use, first-read, and per-subpass layout-transition data.
    struct AttachmentTracker {  // This is really only of local interest, but a bit big for a lambda
        RENDER_PASS_STATE *const rp;
        std::vector<uint32_t> &first;               // first subpass using each attachment (VK_SUBPASS_EXTERNAL if unused)
        std::vector<bool> &first_is_transition;     // true if first use required a layout transition from initialLayout
        std::vector<uint32_t> &last;                // last subpass using each attachment (VK_SUBPASS_EXTERNAL if unused)
        std::vector<std::vector<RENDER_PASS_STATE::AttachmentTransition>> &subpass_transitions;
        std::unordered_map<uint32_t, bool> &first_read;  // whether each attachment's first use is a read
        const uint32_t attachment_count;
        std::vector<VkImageLayout> attachment_layout;    // current layout of each attachment as subpasses are walked
        std::vector<std::vector<VkImageLayout>> subpass_attachment_layout;  // layout of each attachment within each subpass
        AttachmentTracker(std::shared_ptr<RENDER_PASS_STATE> &render_pass)
            : rp(render_pass.get()),
              first(rp->attachment_first_subpass),
              first_is_transition(rp->attachment_first_is_transition),
              last(rp->attachment_last_subpass),
              subpass_transitions(rp->subpass_transitions),
              first_read(rp->attachment_first_read),
              attachment_count(rp->createInfo.attachmentCount),
              attachment_layout(),
              subpass_attachment_layout() {
            first.resize(attachment_count, VK_SUBPASS_EXTERNAL);
            first_is_transition.resize(attachment_count, false);
            last.resize(attachment_count, VK_SUBPASS_EXTERNAL);
            subpass_transitions.resize(rp->createInfo.subpassCount + 1);  // Add an extra for EndRenderPass
            attachment_layout.reserve(attachment_count);
            subpass_attachment_layout.resize(rp->createInfo.subpassCount);
            for (auto &subpass_layouts : subpass_attachment_layout) {
                subpass_layouts.resize(attachment_count, kInvalidLayout);
            }

            // Every attachment starts in its declared initial layout
            for (uint32_t j = 0; j < attachment_count; j++) {
                attachment_layout.push_back(rp->createInfo.pAttachments[j].initialLayout);
            }
        }

        // Process one attachment-reference array of a subpass (color, resolve, depth/stencil, or input).
        void Update(uint32_t subpass, const VkAttachmentReference2 *attach_ref, uint32_t count, bool is_read) {
            if (nullptr == attach_ref) return;
            for (uint32_t j = 0; j < count; ++j) {
                const auto attachment = attach_ref[j].attachment;
                if (attachment != VK_ATTACHMENT_UNUSED) {
                    const auto layout = attach_ref[j].layout;
                    // Take advantage of the fact that insert won't overwrite, so we'll only write the first time.
                    first_read.insert(std::make_pair(attachment, is_read));
                    if (first[attachment] == VK_SUBPASS_EXTERNAL) {
                        // First use of this attachment: record it, and a transition from initialLayout if needed
                        first[attachment] = subpass;
                        const auto initial_layout = rp->createInfo.pAttachments[attachment].initialLayout;
                        if (initial_layout != layout) {
                            subpass_transitions[subpass].emplace_back(VK_SUBPASS_EXTERNAL, attachment, initial_layout, layout);
                            first_is_transition[attachment] = true;
                        }
                    }
                    last[attachment] = subpass;

                    // Record a transition from each dependent predecessor subpass whose layout differs
                    for (const auto &prev : rp->subpass_dependencies[subpass].prev) {
                        const auto prev_pass = prev.first->pass;
                        const auto prev_layout = subpass_attachment_layout[prev_pass][attachment];
                        if ((prev_layout != kInvalidLayout) && (prev_layout != layout)) {
                            subpass_transitions[subpass].emplace_back(prev_pass, attachment, prev_layout, layout);
                        }
                    }
                    attachment_layout[attachment] = layout;
                }
            }
        }
        // Record transitions to finalLayout that occur at EndRenderPass (stored at index subpassCount).
        void FinalTransitions() {
            auto &final_transitions = subpass_transitions[rp->createInfo.subpassCount];

            for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
                const auto final_layout = rp->createInfo.pAttachments[attachment].finalLayout;
                // Add final transitions for attachments that were used and change layout.
                if ((last[attachment] != VK_SUBPASS_EXTERNAL) && final_layout != attachment_layout[attachment]) {
                    final_transitions.emplace_back(last[attachment], attachment, attachment_layout[attachment], final_layout);
                }
            }
        }
    };
    AttachmentTracker attachment_tracker(render_pass);

    for (uint32_t subpass_index = 0; subpass_index < create_info->subpassCount; ++subpass_index) {
        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[subpass_index];
        attachment_tracker.Update(subpass_index, subpass.pColorAttachments, subpass.colorAttachmentCount, false);
        attachment_tracker.Update(subpass_index, subpass.pResolveAttachments, subpass.colorAttachmentCount, false);
        attachment_tracker.Update(subpass_index, subpass.pDepthStencilAttachment, 1, false);
        attachment_tracker.Update(subpass_index, subpass.pInputAttachments, subpass.inputAttachmentCount, true);
    }
    attachment_tracker.FinalTransitions();

    // Add implicit dependencies (per the spec) for first/last uses lacking explicit external barriers
    for (uint32_t attachment = 0; attachment < attachment_tracker.attachment_count; attachment++) {
        const auto first_use = attachment_tracker.first[attachment];
        if (first_use != VK_SUBPASS_EXTERNAL) {
            auto &subpass_dep = render_pass->subpass_dependencies[first_use];
            if (subpass_dep.barrier_from_external.size() == 0) {
                // Add implicit "from external" barrier if there aren't any explicit ones
                subpass_dep.implicit_barrier_from_external.reset(
                    new VkSubpassDependency2(ImplicitDependencyFromExternal(first_use)));
                subpass_dep.barrier_from_external.emplace_back(subpass_dep.implicit_barrier_from_external.get());
            }
        }

        const auto last_use = attachment_tracker.last[attachment];
        if (last_use != VK_SUBPASS_EXTERNAL) {
            auto &subpass_dep = render_pass->subpass_dependencies[last_use];
            if (render_pass->subpass_dependencies[last_use].barrier_to_external.size() == 0) {
                // Add implicit "to external" barrier if there aren't any explicit ones
                subpass_dep.implicit_barrier_to_external.reset(new VkSubpassDependency2(ImplicitDependencyToExternal(last_use)));
                subpass_dep.barrier_to_external.emplace_back(subpass_dep.implicit_barrier_to_external.get());
            }
        }
    }

    // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
    renderPassMap[*pRenderPass] = std::move(render_pass);
}
4419
// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in the google style guide, but intentionally forces the caller
// to move or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr
// copy construction or assignment.
4424void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
4425 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4426 VkResult result) {
4427 if (VK_SUCCESS != result) return;
4428 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4429 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
4430}
4431
Tony-LunarG977448c2019-12-02 14:52:02 -07004432void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
4433 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4434 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004435 if (VK_SUCCESS != result) return;
4436 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4437 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
4438}
4439
Tony-LunarG977448c2019-12-02 14:52:02 -07004440void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
4441 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4442 VkResult result) {
4443 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
4444}
4445
4446void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
4447 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4448 VkResult result) {
4449 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
4450}
4451
// Shared state recording for vkCmdBeginRenderPass / vkCmdBeginRenderPass2[KHR]: records the
// active render pass, framebuffer, subpass index, and subpass contents on the command buffer,
// wires up object lifetime bindings, and captures device-group and imageless-framebuffer
// information from the begin info's pNext chain. Does nothing if the render pass state can't
// be found (or pRenderPassBegin is null).
void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        // Deep-copy the begin info (safe_* struct) since the caller's pointer won't outlive the call
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        if (framebuffer) cb_state->framebuffers.insert(framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer.get());
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(
            render_pass_state->cb_bindings,
            VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state.get()), cb_state);

        // VK_KHR_device_group: restrict the render pass to the chained device mask, if present
        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }

        // VK_KHR_imageless_framebuffer: record the attachments supplied at begin time
        cb_state->imagelessFramebufferAttachments.clear();
        auto attachment_info_struct = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
        if (attachment_info_struct) {
            for (uint32_t i = 0; i < attachment_info_struct->attachmentCount; i++) {
                IMAGE_VIEW_STATE *img_view_state = GetImageViewState(attachment_info_struct->pAttachments[i]);
                cb_state->imagelessFramebufferAttachments.push_back(img_view_state);
            }
        }
    }
}
4490
4491void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
4492 const VkRenderPassBeginInfo *pRenderPassBegin,
4493 VkSubpassContents contents) {
4494 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
4495}
4496
4497void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
4498 const VkRenderPassBeginInfo *pRenderPassBegin,
4499 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
4500 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4501}
4502
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06004503void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4504 uint32_t counterBufferCount,
4505 const VkBuffer *pCounterBuffers,
4506 const VkDeviceSize *pCounterBufferOffsets) {
4507 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4508
4509 cb_state->transform_feedback_active = true;
4510}
4511
4512void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4513 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
4514 const VkDeviceSize *pCounterBufferOffsets) {
4515 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4516
4517 cb_state->transform_feedback_active = false;
4518}
4519
// Core vkCmdBeginRenderPass2 handler (promoted from KHR): identical to the KHR
// variant; contents come from pSubpassBeginInfo.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}
4525
locke-lunargd556cc32019-09-17 01:21:23 -06004526void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4527 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4528 cb_state->activeSubpass++;
4529 cb_state->activeSubpassContents = contents;
4530}
4531
// vkCmdNextSubpass handler: contents arrive as a direct parameter.
void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}
4535
// vkCmdNextSubpass2KHR handler: contents are carried in pSubpassBeginInfo.
void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}
4541
// Core vkCmdNextSubpass2 handler (promoted from KHR): same as the KHR variant.
void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}
4547
locke-lunargd556cc32019-09-17 01:21:23 -06004548void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
4549 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4550 cb_state->activeRenderPass = nullptr;
4551 cb_state->activeSubpass = 0;
4552 cb_state->activeFramebuffer = VK_NULL_HANDLE;
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03004553 cb_state->imagelessFramebufferAttachments.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06004554}
4555
// vkCmdEndRenderPass handler.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}
4559
// vkCmdEndRenderPass2KHR handler; pSubpassEndInfo carries nothing we track.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}
4564
// Core vkCmdEndRenderPass2 handler (promoted from KHR).
void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                             const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}
// vkCmdExecuteCommands handler: folds each secondary command buffer's tracked
// state (usage flags, image layouts, linked-CB relationships, deferred query /
// queue-submit callbacks) into the primary command buffer.
void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        // A non-simultaneous-use secondary strips the simultaneous-use bit from
        // the primary, since the primary can no longer be resubmitted while pending.
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
                // from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate inital layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        // Cross-link primary and secondary so invalidation of one reaches the other.
        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        // Deferred callbacks recorded in the secondary must run as part of the primary.
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}
4611
4612void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
4613 VkFlags flags, void **ppData, VkResult result) {
4614 if (VK_SUCCESS != result) return;
4615 RecordMappedMemory(mem, offset, size, ppData);
4616}
4617
4618void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4619 auto mem_info = GetDevMemState(mem);
4620 if (mem_info) {
4621 mem_info->mapped_range = MemRange();
4622 mem_info->p_driver_data = nullptr;
4623 }
4624}
4625
// Shared recorder for vkBindImageMemory and vkBindImageMemory2*: updates the
// image's memory/swapchain binding state and its subresource range encoder.
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // An Android special image cannot get VkSubresourceLayout until the image binds a memory.
        // See: VUID-vkGetImageSubresourceLayout-image-01895
        image_state->fragment_encoder =
            std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            // Binding to swapchain-owned memory: record the swapchain/image-index
            // association instead of a device-memory binding.
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        // Alias-capable and swapchain-bound images may share memory with others.
        if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
            AddAliasingImage(image_state);
        }
    }
}
4657
4658void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
4659 VkDeviceSize memoryOffset, VkResult result) {
4660 if (VK_SUCCESS != result) return;
4661 VkBindImageMemoryInfo bindInfo = {};
4662 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
4663 bindInfo.image = image;
4664 bindInfo.memory = mem;
4665 bindInfo.memoryOffset = memoryOffset;
4666 UpdateBindImageMemoryState(bindInfo);
4667}
4668
4669void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
4670 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
4671 if (VK_SUCCESS != result) return;
4672 for (uint32_t i = 0; i < bindInfoCount; i++) {
4673 UpdateBindImageMemoryState(pBindInfos[i]);
4674 }
4675}
4676
4677void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
4678 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
4679 if (VK_SUCCESS != result) return;
4680 for (uint32_t i = 0; i < bindInfoCount; i++) {
4681 UpdateBindImageMemoryState(pBindInfos[i]);
4682 }
4683}
4684
4685void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
4686 auto event_state = GetEventState(event);
4687 if (event_state) {
4688 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
4689 }
locke-lunargd556cc32019-09-17 01:21:23 -06004690}
4691
4692void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
4693 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
4694 VkResult result) {
4695 if (VK_SUCCESS != result) return;
4696 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
4697 pImportSemaphoreFdInfo->flags);
4698}
4699
4700void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
4701 VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
4702 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
4703 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
4704 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
4705 semaphore_state->scope = kSyncScopeExternalPermanent;
4706 }
4707}
4708
4709#ifdef VK_USE_PLATFORM_WIN32_KHR
4710void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
4711 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
4712 if (VK_SUCCESS != result) return;
4713 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
4714 pImportSemaphoreWin32HandleInfo->flags);
4715}
4716
4717void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
4718 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4719 HANDLE *pHandle, VkResult result) {
4720 if (VK_SUCCESS != result) return;
4721 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
4722}
4723
4724void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
4725 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
4726 if (VK_SUCCESS != result) return;
4727 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
4728 pImportFenceWin32HandleInfo->flags);
4729}
4730
4731void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
4732 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4733 HANDLE *pHandle, VkResult result) {
4734 if (VK_SUCCESS != result) return;
4735 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
4736}
4737#endif
4738
4739void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
4740 VkResult result) {
4741 if (VK_SUCCESS != result) return;
4742 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
4743}
4744
4745void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
4746 VkFenceImportFlagsKHR flags) {
4747 FENCE_STATE *fence_node = GetFenceState(fence);
4748 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
4749 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
4750 fence_node->scope == kSyncScopeInternal) {
4751 fence_node->scope = kSyncScopeExternalTemporary;
4752 } else {
4753 fence_node->scope = kSyncScopeExternalPermanent;
4754 }
4755 }
4756}
4757
4758void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
4759 VkResult result) {
4760 if (VK_SUCCESS != result) return;
4761 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
4762}
4763
4764void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
4765 FENCE_STATE *fence_state = GetFenceState(fence);
4766 if (fence_state) {
4767 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
4768 // Export with reference transference becomes external
4769 fence_state->scope = kSyncScopeExternalPermanent;
4770 } else if (fence_state->scope == kSyncScopeInternal) {
4771 // Export with copy transference has a side effect of resetting the fence
4772 fence_state->state = FENCE_UNSIGNALED;
4773 }
4774 }
4775}
4776
4777void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
4778 VkResult result) {
4779 if (VK_SUCCESS != result) return;
4780 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
4781}
4782
4783void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
4784 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
4785 if (VK_SUCCESS != result) return;
4786 eventMap[*pEvent].write_in_use = 0;
4787 eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
4788}
4789
4790void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
4791 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
4792 SWAPCHAIN_NODE *old_swapchain_state) {
4793 if (VK_SUCCESS == result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004794 auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004795 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
4796 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
4797 swapchain_state->shared_presentable = true;
4798 }
4799 surface_state->swapchain = swapchain_state.get();
4800 swapchainMap[*pSwapchain] = std::move(swapchain_state);
4801 } else {
4802 surface_state->swapchain = nullptr;
4803 }
4804 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
4805 if (old_swapchain_state) {
4806 old_swapchain_state->retired = true;
4807 }
4808 return;
4809}
4810
4811void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
4812 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
4813 VkResult result) {
4814 auto surface_state = GetSurfaceState(pCreateInfo->surface);
4815 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
4816 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
4817}
4818
// vkDestroySwapchainKHR handler: tears down tracking for the swapchain and all
// of its presentable images, and unlinks it from its surface.
void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    // Destroying VK_NULL_HANDLE is a valid no-op.
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        // Swapchain images are owned by the swapchain: drop their memory
        // bindings, tracking entries, and any aliasing relationships.
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        // Detach from the surface if this swapchain is still the current one.
        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        // Mark destroyed before erasing so outstanding shared references can tell.
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}
4838
// vkQueuePresentKHR handler: unsignals the wait semaphores and marks each
// successfully presented image as released back to the WSI.
void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
        // confused itself just as much.
        // Per-swapchain results (when provided) override the aggregate result.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                // Shared-presentable images keep their layout locked after the first present.
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
    // its semaphore waits) /never/ participate in any completion proof.
}
4870
4871void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
4872 const VkSwapchainCreateInfoKHR *pCreateInfos,
4873 const VkAllocationCallbacks *pAllocator,
4874 VkSwapchainKHR *pSwapchains, VkResult result) {
4875 if (pCreateInfos) {
4876 for (uint32_t i = 0; i < swapchainCount; i++) {
4877 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
4878 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
4879 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
4880 }
4881 }
4882}
4883
// Shared recorder for vkAcquireNextImage{,2}KHR: moves the fence/semaphore into
// their post-acquire states and marks the acquired swapchain image.
void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            // Propagate the swapchain's shared-presentable property to the image.
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}
4913
4914void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4915 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
4916 VkResult result) {
4917 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4918 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
4919}
4920
4921void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
4922 uint32_t *pImageIndex, VkResult result) {
4923 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4924 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
4925 pAcquireInfo->fence, pImageIndex);
4926}
4927
4928void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
4929 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
4930 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
4931 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
4932 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
4933 phys_device_state.phys_device = pPhysicalDevices[i];
4934 // Init actual features for each physical device
4935 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
4936 }
4937 }
4938}
4939
4940// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
4941static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
4942 VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
4943 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
4944
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06004945 if (pQueueFamilyProperties) { // Save queue family properties
locke-lunargd556cc32019-09-17 01:21:23 -06004946 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
4947 for (uint32_t i = 0; i < count; ++i) {
4948 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
4949 }
4950 }
4951}
4952
4953void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
4954 uint32_t *pQueueFamilyPropertyCount,
4955 VkQueueFamilyProperties *pQueueFamilyProperties) {
4956 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4957 assert(physical_device_state);
4958 VkQueueFamilyProperties2KHR *pqfp = nullptr;
4959 std::vector<VkQueueFamilyProperties2KHR> qfp;
4960 qfp.resize(*pQueueFamilyPropertyCount);
4961 if (pQueueFamilyProperties) {
4962 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
4963 qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
4964 qfp[i].pNext = nullptr;
4965 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
4966 }
4967 pqfp = qfp.data();
4968 }
4969 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
4970}
4971
4972void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
4973 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
4974 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4975 assert(physical_device_state);
4976 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4977 pQueueFamilyProperties);
4978}
4979
4980void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
4981 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
4982 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4983 assert(physical_device_state);
4984 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4985 pQueueFamilyProperties);
4986}
4987void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
4988 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004989 if (!surface) return;
4990 auto surface_state = GetSurfaceState(surface);
4991 surface_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004992 surface_map.erase(surface);
4993}
4994
4995void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004996 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06004997}
4998
4999void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
5000 const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
5001 const VkAllocationCallbacks *pAllocator,
5002 VkSurfaceKHR *pSurface, VkResult result) {
5003 if (VK_SUCCESS != result) return;
5004 RecordVulkanSurface(pSurface);
5005}
5006
5007#ifdef VK_USE_PLATFORM_ANDROID_KHR
5008void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
5009 const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
5010 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5011 VkResult result) {
5012 if (VK_SUCCESS != result) return;
5013 RecordVulkanSurface(pSurface);
5014}
5015#endif // VK_USE_PLATFORM_ANDROID_KHR
5016
5017#ifdef VK_USE_PLATFORM_IOS_MVK
5018void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
5019 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5020 VkResult result) {
5021 if (VK_SUCCESS != result) return;
5022 RecordVulkanSurface(pSurface);
5023}
5024#endif // VK_USE_PLATFORM_IOS_MVK
5025
5026#ifdef VK_USE_PLATFORM_MACOS_MVK
5027void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
5028 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
5029 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5030 VkResult result) {
5031 if (VK_SUCCESS != result) return;
5032 RecordVulkanSurface(pSurface);
5033}
5034#endif // VK_USE_PLATFORM_MACOS_MVK
5035
Jeremy Kniagerf33a67c2019-12-09 09:44:39 -07005036#ifdef VK_USE_PLATFORM_METAL_EXT
5037void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
5038 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
5039 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5040 VkResult result) {
5041 if (VK_SUCCESS != result) return;
5042 RecordVulkanSurface(pSurface);
5043}
5044#endif // VK_USE_PLATFORM_METAL_EXT
5045
locke-lunargd556cc32019-09-17 01:21:23 -06005046#ifdef VK_USE_PLATFORM_WAYLAND_KHR
5047void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
5048 const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
5049 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5050 VkResult result) {
5051 if (VK_SUCCESS != result) return;
5052 RecordVulkanSurface(pSurface);
5053}
5054#endif // VK_USE_PLATFORM_WAYLAND_KHR
5055
5056#ifdef VK_USE_PLATFORM_WIN32_KHR
5057void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
5058 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
5059 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5060 VkResult result) {
5061 if (VK_SUCCESS != result) return;
5062 RecordVulkanSurface(pSurface);
5063}
5064#endif // VK_USE_PLATFORM_WIN32_KHR
5065
5066#ifdef VK_USE_PLATFORM_XCB_KHR
5067void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
5068 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5069 VkResult result) {
5070 if (VK_SUCCESS != result) return;
5071 RecordVulkanSurface(pSurface);
5072}
5073#endif // VK_USE_PLATFORM_XCB_KHR
5074
5075#ifdef VK_USE_PLATFORM_XLIB_KHR
5076void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
5077 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5078 VkResult result) {
5079 if (VK_SUCCESS != result) return;
5080 RecordVulkanSurface(pSurface);
5081}
5082#endif // VK_USE_PLATFORM_XLIB_KHR
5083
Niklas Haas8b84af12020-04-19 22:20:11 +02005084void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
5085 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
5086 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5087 VkResult result) {
5088 if (VK_SUCCESS != result) return;
5089 RecordVulkanSurface(pSurface);
5090}
5091
Cort23cf2282019-09-20 18:58:18 +02005092void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005093 VkPhysicalDeviceFeatures *pFeatures) {
5094 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Yilong Li358152a2020-07-08 02:16:45 -07005095 // Reset the features2 safe struct before setting up the features field.
5096 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02005097 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02005098}
5099
5100void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005101 VkPhysicalDeviceFeatures2 *pFeatures) {
5102 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02005103 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005104}
5105
5106void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005107 VkPhysicalDeviceFeatures2 *pFeatures) {
5108 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02005109 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005110}
5111
locke-lunargd556cc32019-09-17 01:21:23 -06005112void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
5113 VkSurfaceKHR surface,
5114 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
5115 VkResult result) {
5116 if (VK_SUCCESS != result) return;
5117 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005118 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005119
5120 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5121 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005122}
5123
// Cache surface capabilities from the "2KHR" query; only the core capabilities member is tracked,
// and the same called-flag is shared with the non-2 entry point.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    // The output struct is only defined on success.
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;

    // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
}
5134
5135void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
5136 VkSurfaceKHR surface,
5137 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
5138 VkResult result) {
5139 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005140 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
5141 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
5142 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
5143 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
5144 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
5145 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
5146 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
5147 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
5148 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
5149 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005150
5151 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5152 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005153}
5154
// Record whether the given (physical device, queue family) pair can present to this surface.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}
5162
5163void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
5164 VkSurfaceKHR surface,
5165 uint32_t *pPresentModeCount,
5166 VkPresentModeKHR *pPresentModes,
5167 VkResult result) {
5168 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5169
5170 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
5171 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005172
5173 if (*pPresentModeCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005174 if (*pPresentModeCount > physical_device_state->present_modes.size())
5175 physical_device_state->present_modes.resize(*pPresentModeCount);
5176 }
5177 if (pPresentModes) {
locke-lunargd556cc32019-09-17 01:21:23 -06005178 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
5179 physical_device_state->present_modes[i] = pPresentModes[i];
5180 }
5181 }
5182}
5183
5184void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
5185 uint32_t *pSurfaceFormatCount,
5186 VkSurfaceFormatKHR *pSurfaceFormats,
5187 VkResult result) {
5188 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5189
5190 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005191
5192 if (*pSurfaceFormatCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005193 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
5194 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
5195 }
5196 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06005197 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
5198 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
5199 }
5200 }
5201}
5202
5203void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
5204 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5205 uint32_t *pSurfaceFormatCount,
5206 VkSurfaceFormat2KHR *pSurfaceFormats,
5207 VkResult result) {
5208 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5209
5210 auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
5211 if (*pSurfaceFormatCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005212 if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
5213 physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
5214 }
5215 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06005216 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
5217 physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
5218 }
5219 }
5220}
5221
// Forward debug-utils label begin to the logging helper so labels appear in validation messages.
void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}
5226
// Forward debug-utils label end to the logging helper (pops the current label scope).
void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}
5230
// Forward an inserted debug-utils label to the logging helper and keep a copy on the
// command-buffer state for quick access when reporting.
void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}
5239
5240void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
5241 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
5242 if (NULL != pPhysicalDeviceGroupProperties) {
5243 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
5244 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
5245 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
5246 auto &phys_device_state = physical_device_map[cur_phys_dev];
5247 phys_device_state.phys_device = cur_phys_dev;
5248 // Init actual features for each physical device
5249 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
5250 }
5251 }
5252 }
5253}
5254
// Core entry point: record device-group enumeration results (VK_INCOMPLETE still returns data).
void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}
5261
// KHR alias: record device-group enumeration results (VK_INCOMPLETE still returns data).
void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}
5268
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005269void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
5270 uint32_t queueFamilyIndex,
5271 uint32_t *pCounterCount,
5272 VkPerformanceCounterKHR *pCounters) {
5273 if (NULL == pCounters) return;
5274
5275 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5276 assert(physical_device_state);
5277
5278 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
5279 queueFamilyCounters->counters.resize(*pCounterCount);
5280 for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];
5281
5282 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
5283}
5284
// Record the enumerated performance counters (descriptions are not tracked, only the counters).
void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}
5291
// Track that the profiling lock was acquired; perf-query validation consults this flag.
void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}
5296
// Track profiling-lock release, and mark every live command buffer so validation can detect
// perf-query command buffers recorded before the lock was dropped.
void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}
5303
// Retire tracker state for a destroyed descriptor update template: flag any outstanding shared
// references as destroyed, then drop the map entry.
void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}
5312
// KHR alias of PreCallRecordDestroyDescriptorUpdateTemplate; same retire-then-erase sequence.
void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}
5321
// Create tracking state for a new descriptor update template; the create info is deep-copied
// (safe_* wrapper) so the state outlives the caller's structures.
void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}
5328
// Core entry point: record a successfully created descriptor update template.
void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}
5335
// KHR alias: record a successfully created descriptor update template.
void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}
5342
5343void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
5344 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5345 const void *pData) {
5346 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
5347 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
5348 assert(0);
5349 } else {
5350 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
5351 // TODO: Record template push descriptor updates
5352 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
5353 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
5354 }
5355 }
5356}
5357
// Core entry point: forward templated descriptor set updates to the shared recorder.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5363
// KHR alias: forward templated descriptor set updates to the shared recorder.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5369
// Record state for vkCmdPushDescriptorSetWithTemplateKHR: decode the opaque template data into
// ordinary write-descriptor updates against the pipeline layout's set layout, then route them
// through the regular push-descriptor recorder.
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        // Only decode against a set layout that still exists
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}
5390
// Shared handler for the display-plane property queries (KHR and 2KHR): remember the plane count
// and that the query was made (pProperties may be any of the per-version property structs).
void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    // Either pass of the two-call idiom counts as having made the query.
    if (*pPropertyCount || pProperties) {
        physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
    }
}
5401
// KHR entry point: record display-plane property query results.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}
5409
// 2KHR entry point: record display-plane property query results.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}
5417
// Record an indexed query begin (transform-feedback extension) through the common begin path.
void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}
5424
// Record an indexed query end (transform-feedback extension) through the common end path.
void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}
5431
// Shared handler for vkCreateSamplerYcbcrConversion{,KHR}: build tracking state, let the Android
// hardware-buffer path fill in external-format data when that extension is enabled, and cache the
// format features / chroma filter used by later sampler validation.
void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
    }

    const VkFormat conversion_format = create_info->format;

    if (conversion_format != VK_FORMAT_UNDEFINED) {
        // If format is VK_FORMAT_UNDEFINED, will be set by external AHB features
        ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
    }

    ycbcr_state->chromaFilter = create_info->chromaFilter;
    ycbcr_state->format = conversion_format;
    samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
}
5451
// Core entry point: record a successfully created sampler Ycbcr conversion.
void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}
5460
// KHR alias: record a successfully created sampler Ycbcr conversion.
void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}
5469
// Shared handler for vkDestroySamplerYcbcrConversion{,KHR}: tear down Android external-format
// bookkeeping when applicable, flag outstanding references as destroyed, and drop the map entry.
void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
    }

    auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
    ycbcr_state->destroyed = true;
    samplerYcbcrConversionMap.erase(ycbcr_conversion);
}
5479
// Core entry point: retire tracker state for a destroyed sampler Ycbcr conversion.
void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}
5485
// KHR alias: retire tracker state for a destroyed sampler Ycbcr conversion.
void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}
5492
// Shared handler for vkResetQueryPool{,EXT} (host query reset): mark the affected queries RESET.
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    // Clamp so we never walk past the last query in the pool.
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        // Performance queries keep one state entry per counter pass; reset each pass too.
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_obj.perf_pass = passIndex;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}
5516
// EXT alias: forward host query-pool reset to the shared recorder.
void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5521
// Core (1.2) entry point: forward host query-pool reset to the shared recorder.
void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5526
// Apply a templated update to a descriptor set by decoding it into plain write-descriptor
// structures and running them through the normal update path.
void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}
5534
5535// Update the common AllocateDescriptorSetsData
5536void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005537 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005538 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005539 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005540 if (layout) {
5541 ds_data->layout_nodes[i] = layout;
5542 // Count total descriptors required per type
5543 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5544 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
5545 uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
5546 ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
5547 }
5548 }
5549 // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
5550 }
5551}
5552
// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    // Variable descriptor counts only apply when the pNext struct covers every set being allocated.
    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
                                                                       variable_count, this);
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}
5578
// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
                                                            VkPipelineBindPoint bind_point, const char *function) {
    UpdateDrawState(cb_state, cmd_type, bind_point, function);
    // Any draw or dispatch marks the command buffer as containing dispatch-class work.
    cb_state->hasDispatchCmd = true;
}
5585
// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
                                                    const char *function) {
    UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
    // Additionally flag that an actual draw was recorded.
    cb_state->hasDrawCmd = true;
}
5592
// Record state side effects of vkCmdDraw on the graphics bind point.
void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                                   uint32_t firstVertex, uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
}
5598
// Record state side effects of vkCmdDrawIndexed on the graphics bind point.
void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                                          uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
}
5605
// Record state side effects of vkCmdDrawIndirect, including the indirect buffer binding.
void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                           uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
    // Tie the indirect parameter buffer's lifetime to this command buffer.
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}
5613
// Record state side effects of vkCmdDrawIndexedIndirect, including the indirect buffer binding.
void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
    // Tie the indirect parameter buffer's lifetime to this command buffer.
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}
5621
// Record state side effects of vkCmdDispatch on the compute bind point.
void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
}
5626
5627void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5628 VkDeviceSize offset) {
5629 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005630 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005631 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5632 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5633}
5634
// Shared handler for vkCmdDrawIndirectCount{,KHR}: update draw state and bind both the indirect
// parameter buffer and the count buffer to the command buffer's lifetime.
void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                        VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                        uint32_t stride, const char *function) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}
5645
// KHR alias: forward to the shared indirect-count recorder with the KHR function name for logs.
void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                               "vkCmdDrawIndirectCountKHR()");
}
5653
// Core (1.2) entry point: forward to the shared indirect-count recorder.
void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                               "vkCmdDrawIndirectCount()");
}
5660
// Shared handler for vkCmdDrawIndexedIndirectCount{,KHR}: update draw state and bind both the
// indirect parameter buffer and the count buffer to the command buffer's lifetime.
void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride, const char *function) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}
5671
5672void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5673 VkDeviceSize offset, VkBuffer countBuffer,
5674 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5675 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005676 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5677 "vkCmdDrawIndexedIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005678}
5679
5680void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
5681 VkDeviceSize offset, VkBuffer countBuffer,
5682 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5683 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005684 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5685 "vkCmdDrawIndexedIndirectCount()");
locke-lunargd556cc32019-09-17 01:21:23 -06005686}
5687
5688void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5689 uint32_t firstTask) {
5690 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005691 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005692}
5693
5694void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5695 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5696 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005697 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5698 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005699 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5700 if (buffer_state) {
5701 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5702 }
5703}
5704
5705void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5706 VkDeviceSize offset, VkBuffer countBuffer,
5707 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5708 uint32_t stride) {
5709 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5710 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5711 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005712 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5713 "vkCmdDrawMeshTasksIndirectCountNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005714 if (buffer_state) {
5715 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5716 }
5717 if (count_buffer_state) {
5718 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5719 }
5720}
5721
5722void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
5723 const VkAllocationCallbacks *pAllocator,
5724 VkShaderModule *pShaderModule, VkResult result,
5725 void *csm_state_data) {
5726 if (VK_SUCCESS != result) return;
5727 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
5728
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06005729 spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
locke-lunargd556cc32019-09-17 01:21:23 -06005730 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005731 auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
5732 csm_state->unique_shader_id)
5733 : std::make_shared<SHADER_MODULE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06005734 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
5735}
5736
5737void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005738 PIPELINE_STATE::StageState *stage_state) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005739 // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
5740 auto module = GetShaderModuleState(pStage->module);
5741 if (!module->has_valid_spirv) return;
5742
5743 // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
5744 auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
5745 if (entrypoint == module->end()) return;
5746
locke-lunarg654e3692020-06-04 17:19:15 -06005747 stage_state->stage_flag = pStage->stage;
5748
locke-lunargd556cc32019-09-17 01:21:23 -06005749 // Mark accessible ids
5750 stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
5751 ProcessExecutionModes(module, entrypoint, pipeline);
5752
locke-lunarg63e4daf2020-08-17 17:53:25 -06005753 stage_state->descriptor_uses = CollectInterfaceByDescriptorSlot(
5754 module, stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
locke-lunargd556cc32019-09-17 01:21:23 -06005755 // Capture descriptor uses for the pipeline
locke-lunarg36045992020-08-20 16:54:37 -06005756 for (const auto &use : stage_state->descriptor_uses) {
locke-lunargd556cc32019-09-17 01:21:23 -06005757 // While validating shaders capture which slots are used by the pipeline
John Zulauf649edd52019-10-02 14:39:41 -06005758 const uint32_t slot = use.first.first;
locke-lunarg36045992020-08-20 16:54:37 -06005759 auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
locke-lunargd556cc32019-09-17 01:21:23 -06005760 reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
locke-lunarg25b6c352020-08-06 17:44:18 -06005761 if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
locke-lunarg12d20992020-09-21 12:46:49 -06005762 if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
locke-lunargae2a43c2020-09-22 17:21:57 -06005763 if (use.second.is_sampler_bias_offset) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET);
locke-lunarg12d20992020-09-21 12:46:49 -06005764
John Zulauf649edd52019-10-02 14:39:41 -06005765 pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
locke-lunarg36045992020-08-20 16:54:37 -06005766 if (use.second.samplers_used_by_image.size()) {
5767 pipeline->active_slots[slot][use.first.second].samplers_used_by_image[stage_state->stage_flag] =
5768 &use.second.samplers_used_by_image;
5769 }
locke-lunargd556cc32019-09-17 01:21:23 -06005770 }
locke-lunarg78486832020-09-09 19:39:42 -06005771
locke-lunarg96dc9632020-06-10 17:22:18 -06005772 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
5773 pipeline->fragmentShader_writable_output_location_list = CollectWritableOutputLocationinFS(*module, *pStage);
5774 }
locke-lunargd556cc32019-09-17 01:21:23 -06005775}
5776
5777void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
5778 if (cb_state == nullptr) {
5779 return;
5780 }
5781
5782 const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
5783 if (pipeline_layout_state == nullptr) {
5784 return;
5785 }
5786
5787 if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
5788 cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
5789 cb_state->push_constant_data.clear();
5790 uint32_t size_needed = 0;
5791 for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
5792 size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
5793 }
5794 cb_state->push_constant_data.resize(size_needed, 0);
5795 }
5796}
John Zulauf22b0fbe2019-10-15 06:26:16 -06005797
// Post-call state tracking for vkGetSwapchainImagesKHR: creates IMAGE_STATE entries for
// each newly retrieved swapchain image, reconstructing the VkImageCreateInfo implied by
// the swapchain's create info, and records the queried image count on the swapchain.
void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    // Grow the tracked image list if the app queried a larger count than previously seen.
    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image, mirroring the swapchain's
            // create parameters into an equivalent image create info.
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;  // to be set later
            image_ci.flags = 0;        // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            // Propagate any format-list chain from the swapchain create info to the image.
            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            // Translate swapchain create flags into their equivalent image create flags.
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;  // contents undefined until the image is acquired and written
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);

            AddImageStateProps(*image_state, device, physical_device);
        }
    }

    // Remember the largest image count the app has retrieved for this swapchain.
    if (*pSwapchainImageCount) {
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}
sourav parmar35e7a002020-06-09 17:58:44 -07005857
5858void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureKHR(
5859 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
5860 const VkAccelerationStructureBuildOffsetInfoKHR *const *ppOffsetInfos) {
5861 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5862 if (cb_state == nullptr) {
5863 return;
5864 }
5865 for (uint32_t i = 0; i < infoCount; ++i) {
5866 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfos[i].dstAccelerationStructure);
5867 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfos[i].srcAccelerationStructure);
5868 if (dst_as_state != nullptr) {
5869 dst_as_state->built = true;
5870 dst_as_state->build_info_khr.initialize(pInfos);
5871 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
5872 }
5873 if (src_as_state != nullptr) {
5874 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
5875 }
5876 }
5877 cb_state->hasBuildAccelerationStructureCmd = true;
5878}
5879
5880void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
5881 const VkCopyAccelerationStructureInfoKHR *pInfo) {
5882 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5883 if (cb_state) {
5884 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfo->src);
5885 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfo->dst);
5886 if (dst_as_state != nullptr && src_as_state != nullptr) {
5887 dst_as_state->built = true;
5888 dst_as_state->build_info_khr = src_as_state->build_info_khr;
5889 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
5890 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
5891 }
5892 }
5893}
Piers Daniell39842ee2020-07-10 16:42:33 -06005894
5895void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
5896 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5897 cb_state->status |= CBSTATUS_CULL_MODE_SET;
5898 cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
5899}
5900
5901void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
5902 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5903 cb_state->status |= CBSTATUS_FRONT_FACE_SET;
5904 cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
5905}
5906
5907void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
5908 VkPrimitiveTopology primitiveTopology) {
5909 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5910 cb_state->primitiveTopology = primitiveTopology;
5911 cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5912 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5913}
5914
5915void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
5916 const VkViewport *pViewports) {
5917 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5918 cb_state->viewportWithCountMask |= (1u << viewportCount) - 1u;
5919 cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
5920 cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
5921}
5922
5923void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
5924 const VkRect2D *pScissors) {
5925 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5926 cb_state->scissorWithCountMask |= (1u << scissorCount) - 1u;
5927 cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
5928 cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
5929}
5930
5931void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
5932 uint32_t bindingCount, const VkBuffer *pBuffers,
5933 const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
5934 const VkDeviceSize *pStrides) {
5935 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5936 if (pStrides) {
5937 cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
5938 cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
5939 }
5940
5941 uint32_t end = firstBinding + bindingCount;
5942 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
5943 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
5944 }
5945
5946 for (uint32_t i = 0; i < bindingCount; ++i) {
5947 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
5948 vertex_buffer_binding.buffer = pBuffers[i];
5949 vertex_buffer_binding.offset = pOffsets[i];
5950 vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
5951 vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
5952 // Add binding for this vertex buffer to this commandbuffer
5953 if (pBuffers[i]) {
5954 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
5955 }
5956 }
5957}
5958
5959void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
5960 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5961 cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
5962 cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
5963}
5964
5965void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
5966 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5967 cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5968 cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5969}
5970
5971void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
5972 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5973 cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
5974 cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
5975}
5976
5977void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
5978 VkBool32 depthBoundsTestEnable) {
5979 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5980 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5981 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5982}
5983void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
5984 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5985 cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
5986 cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
5987}
5988
5989void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
5990 VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
5991 VkCompareOp compareOp) {
5992 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5993 cb_state->status |= CBSTATUS_STENCIL_OP_SET;
5994 cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
5995}