blob: 94b2ddc31a67ef5147014319a426fb680915670c [file] [log] [blame]
Tony-LunarG73719992020-01-15 10:20:28 -07001/* Copyright (c) 2015-2020 The Khronos Group Inc.
2 * Copyright (c) 2015-2020 Valve Corporation
3 * Copyright (c) 2015-2020 LunarG, Inc.
4 * Copyright (C) 2015-2020 Google Inc.
locke-lunargd556cc32019-09-17 01:21:23 -06005 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * Author: Mark Lobodzinski <mark@lunarg.com>
19 * Author: Dave Houlton <daveh@lunarg.com>
20 * Shannon McPherson <shannon@lunarg.com>
21 */
22
#include <array>
#include <cassert>
#include <cmath>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_logging.h"
#include "vk_layer_utils.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"
38
39using std::max;
40using std::string;
41using std::stringstream;
42using std::unique_ptr;
43using std::unordered_map;
44using std::unordered_set;
45using std::vector;
46
John Zulauf890b50b2020-06-17 15:18:19 -060047const char *CommandTypeString(CMD_TYPE type) {
48 // Autogenerated as part of the vk_validation_error_message.h codegen
49 static const std::array<const char *, CMD_RANGE_SIZE> command_name_list = {{VUID_CMD_NAME_LIST}};
50 return command_name_list[type];
51}
52
Mark Lobodzinskib4ab6ac2020-04-02 13:12:06 -060053void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
54 if (add_obj) {
55 instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
56 // Call base class
57 ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
58 }
59}
60
John Zulauf5c5e88d2019-12-26 11:22:02 -070061uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
62 // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
63 uint32_t mip_level_count = range->levelCount;
64 if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
65 mip_level_count = mip_levels - range->baseMipLevel;
66 }
67 return mip_level_count;
68}
69
70uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
71 // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
72 uint32_t array_layer_count = range->layerCount;
73 if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
74 array_layer_count = layers - range->baseArrayLayer;
75 }
76 return array_layer_count;
77}
78
79VkImageSubresourceRange NormalizeSubresourceRange(const VkImageCreateInfo &image_create_info,
80 const VkImageSubresourceRange &range) {
81 VkImageSubresourceRange norm = range;
82 norm.levelCount = ResolveRemainingLevels(&range, image_create_info.mipLevels);
83
84 // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR flag bit, where <extent.depth> and
85 // <arrayLayers> can potentially alias.
86 uint32_t layer_limit = (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR))
87 ? image_create_info.extent.depth
88 : image_create_info.arrayLayers;
89 norm.layerCount = ResolveRemainingLayers(&range, layer_limit);
90
91 // For multiplanar formats, IMAGE_ASPECT_COLOR is equivalent to adding the aspect of the individual planes
92 VkImageAspectFlags &aspect_mask = norm.aspectMask;
93 if (FormatIsMultiplane(image_create_info.format)) {
94 if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
95 aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
96 aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
97 if (FormatPlaneCount(image_create_info.format) > 2) {
98 aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
99 }
100 }
101 }
102 return norm;
103}
104
105VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range) {
106 const VkImageCreateInfo &image_create_info = image_state.createInfo;
107 return NormalizeSubresourceRange(image_create_info, range);
108}
109
John Zulauf2bc1fde2020-04-24 15:09:51 -0600110// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
111// attachments won't persist past the API entry point exit.
112std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
113 const FRAMEBUFFER_STATE &fb_state) {
114 const VkImageView *attachments = fb_state.createInfo.pAttachments;
115 uint32_t count = fb_state.createInfo.attachmentCount;
116 if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
117 const auto *framebuffer_attachments = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
118 if (framebuffer_attachments) {
119 attachments = framebuffer_attachments->pAttachments;
120 count = framebuffer_attachments->attachmentCount;
121 }
122 }
123 return std::make_pair(count, attachments);
124}
125
126std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetAttachmentViews(const VkRenderPassBeginInfo &rp_begin,
127 const FRAMEBUFFER_STATE &fb_state) const {
128 std::vector<const IMAGE_VIEW_STATE *> views;
129
130 const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
131 const auto attachment_count = count_attachment.first;
132 const auto *attachments = count_attachment.second;
133 views.resize(attachment_count, nullptr);
134 for (uint32_t i = 0; i < attachment_count; i++) {
135 if (attachments[i] != VK_NULL_HANDLE) {
136 views[i] = Get<IMAGE_VIEW_STATE>(attachments[i]);
137 }
138 }
139 return views;
140}
141
142std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetCurrentAttachmentViews(const CMD_BUFFER_STATE &cb_state) const {
143 // Only valid *after* RecordBeginRenderPass and *before* RecordEndRenderpass as it relies on cb_state for the renderpass info.
144 std::vector<const IMAGE_VIEW_STATE *> views;
145
locke-lunargaecf2152020-05-12 17:15:41 -0600146 const auto *rp_state = cb_state.activeRenderPass.get();
John Zulauf2bc1fde2020-04-24 15:09:51 -0600147 if (!rp_state) return views;
148 const auto &rp_begin = *cb_state.activeRenderPassBeginInfo.ptr();
149 const auto *fb_state = Get<FRAMEBUFFER_STATE>(rp_begin.framebuffer);
150 if (!fb_state) return views;
151
152 return GetAttachmentViews(rp_begin, *fb_state);
153}
154
locke-lunarg3e127c72020-06-09 17:45:28 -0600155PIPELINE_STATE *GetCurrentPipelineFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint) {
156 const auto last_bound_it = cmd.lastBound.find(pipelineBindPoint);
157 if (last_bound_it == cmd.lastBound.cend()) {
158 return nullptr;
159 }
160 return last_bound_it->second.pipeline_state;
161}
162
163void GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint,
164 const PIPELINE_STATE **rtn_pipe,
165 const std::vector<LAST_BOUND_STATE::PER_SET> **rtn_sets) {
166 const auto last_bound_it = cmd.lastBound.find(pipelineBindPoint);
167 if (last_bound_it == cmd.lastBound.cend()) {
168 return;
169 }
170 *rtn_pipe = last_bound_it->second.pipeline_state;
171 *rtn_sets = &(last_bound_it->second.per_set);
172}
173
locke-lunargd556cc32019-09-17 01:21:23 -0600174#ifdef VK_USE_PLATFORM_ANDROID_KHR
175// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
176// This could also move into a seperate core_validation_android.cpp file... ?
177
178void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
179 const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
180 if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
Spencer Fricke224c9852020-04-06 07:47:29 -0700181 is_node->external_ahb = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600182 }
183 const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
184 if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
185 is_node->has_ahb_format = true;
186 is_node->ahb_format = ext_fmt_android->externalFormat;
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700187 // VUID 01894 will catch if not found in map
188 auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
189 if (it != ahb_ext_formats_map.end()) {
190 is_node->format_features = it->second;
191 }
locke-lunargd556cc32019-09-17 01:21:23 -0600192 }
193}
194
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700195void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
196 const VkExternalMemoryBufferCreateInfo *embci = lvl_find_in_chain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
197 if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
198 bs_node->external_ahb = true;
199 }
200}
201
locke-lunargd556cc32019-09-17 01:21:23 -0600202void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700203 VkSamplerYcbcrConversion ycbcr_conversion,
204 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
locke-lunargd556cc32019-09-17 01:21:23 -0600205 const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
206 if (ext_format_android && (0 != ext_format_android->externalFormat)) {
207 ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700208 // VUID 01894 will catch if not found in map
209 auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
210 if (it != ahb_ext_formats_map.end()) {
211 ycbcr_state->format_features = it->second;
212 }
locke-lunargd556cc32019-09-17 01:21:23 -0600213 }
214};
215
216void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
217 ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
218};
219
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700220void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
221 VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
222 if (VK_SUCCESS != result) return;
223 auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
224 if (ahb_format_props) {
225 ahb_ext_formats_map.insert({ahb_format_props->externalFormat, ahb_format_props->formatFeatures});
226 }
227}
228
locke-lunargd556cc32019-09-17 01:21:23 -0600229#else
230
231void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
232
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700233void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}
234
locke-lunargd556cc32019-09-17 01:21:23 -0600235void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700236 VkSamplerYcbcrConversion ycbcr_conversion,
237 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state){};
locke-lunargd556cc32019-09-17 01:21:23 -0600238
239void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
240
241#endif // VK_USE_PLATFORM_ANDROID_KHR
242
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -0600243std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
244 uint32_t set) {
245 std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
246 if (layout_data && (set < layout_data->set_layouts.size())) {
247 dsl = layout_data->set_layouts[set];
248 }
249 return dsl;
250}
251
Petr Kraus44f1c482020-04-25 20:09:25 +0200252void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
253 // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
254 // if format is AHB external format then the features are already set
255 if (image_state.has_ahb_format == false) {
256 const VkImageTiling image_tiling = image_state.createInfo.tiling;
257 const VkFormat image_format = image_state.createInfo.format;
258 if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
259 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
260 VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
261 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image, &drm_format_properties);
262
263 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
264 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
265 nullptr};
266 format_properties_2.pNext = (void *)&drm_properties_list;
267 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Lionel Landwerlin09351a72020-06-22 18:15:59 +0300268 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
269 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
270 drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
271 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Petr Kraus44f1c482020-04-25 20:09:25 +0200272
273 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300274 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
275 drm_format_properties.drmFormatModifier) {
276 image_state.format_features =
Petr Kraus44f1c482020-04-25 20:09:25 +0200277 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300278 break;
Petr Kraus44f1c482020-04-25 20:09:25 +0200279 }
280 }
281 } else {
282 VkFormatProperties format_properties;
283 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
284 image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
285 : format_properties.optimalTilingFeatures;
286 }
287 }
288}
289
locke-lunargd556cc32019-09-17 01:21:23 -0600290void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
291 const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
292 if (VK_SUCCESS != result) return;
locke-lunarg296a3c92020-03-25 01:04:29 -0600293 auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
sfricke-samsung71bc6572020-04-29 15:49:43 -0700294 is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
locke-lunargd556cc32019-09-17 01:21:23 -0600295 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
296 RecordCreateImageANDROID(pCreateInfo, is_node.get());
297 }
298 const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
299 if (swapchain_info) {
300 is_node->create_from_swapchain = swapchain_info->swapchain;
301 }
302
locke-lunargd556cc32019-09-17 01:21:23 -0600303 // Record the memory requirements in case they won't be queried
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700304 // External AHB memory can't be queried until after memory is bound
Spencer Fricke224c9852020-04-06 07:47:29 -0700305 if (is_node->external_ahb == false) {
sfricke-samsung71bc6572020-04-29 15:49:43 -0700306 if (is_node->disjoint == false) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -0700307 DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
308 } else {
309 uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
310 VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
311 VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
312 VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
313 mem_req_info2.pNext = &image_plane_req;
314 mem_req_info2.image = *pImage;
315
316 assert(plane_count != 0); // assumes each format has at least first plane
317 image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
318 DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
319 is_node->plane0_requirements = mem_reqs2.memoryRequirements;
320
321 if (plane_count >= 2) {
322 image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
323 DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
324 is_node->plane1_requirements = mem_reqs2.memoryRequirements;
325 }
326 if (plane_count >= 3) {
327 image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
328 DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
329 is_node->plane2_requirements = mem_reqs2.memoryRequirements;
330 }
331 }
locke-lunargd556cc32019-09-17 01:21:23 -0600332 }
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700333
Petr Kraus44f1c482020-04-25 20:09:25 +0200334 AddImageStateProps(*is_node, device, physical_device);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700335
sfricke-samsungedce77a2020-07-03 22:35:13 -0700336 is_node->unprotected = ((pCreateInfo->flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);
337
locke-lunargd556cc32019-09-17 01:21:23 -0600338 imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
339}
340
341void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
342 if (!image) return;
343 IMAGE_STATE *image_state = GetImageState(image);
344 const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
345 InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
346 // Clean up memory mapping, bindings and range references for image
347 for (auto mem_binding : image_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700348 RemoveImageMemoryRange(image, mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -0600349 }
350 if (image_state->bind_swapchain) {
351 auto swapchain = GetSwapchainState(image_state->bind_swapchain);
352 if (swapchain) {
locke-lunargb3584732019-10-28 20:18:36 -0600353 swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
locke-lunargd556cc32019-09-17 01:21:23 -0600354 }
355 }
356 RemoveAliasingImage(image_state);
357 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500358 image_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600359 // Remove image from imageMap
360 imageMap.erase(image);
361}
362
363void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
364 VkImageLayout imageLayout, const VkClearColorValue *pColor,
365 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
366 auto cb_node = GetCBState(commandBuffer);
367 auto image_state = GetImageState(image);
368 if (cb_node && image_state) {
369 AddCommandBufferBindingImage(cb_node, image_state);
370 }
371}
372
373void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
374 VkImageLayout imageLayout,
375 const VkClearDepthStencilValue *pDepthStencil,
376 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
377 auto cb_node = GetCBState(commandBuffer);
378 auto image_state = GetImageState(image);
379 if (cb_node && image_state) {
380 AddCommandBufferBindingImage(cb_node, image_state);
381 }
382}
383
384void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
385 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
386 uint32_t regionCount, const VkImageCopy *pRegions) {
387 auto cb_node = GetCBState(commandBuffer);
388 auto src_image_state = GetImageState(srcImage);
389 auto dst_image_state = GetImageState(dstImage);
390
391 // Update bindings between images and cmd buffer
392 AddCommandBufferBindingImage(cb_node, src_image_state);
393 AddCommandBufferBindingImage(cb_node, dst_image_state);
394}
395
396void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
397 VkImageLayout srcImageLayout, VkImage dstImage,
398 VkImageLayout dstImageLayout, uint32_t regionCount,
399 const VkImageResolve *pRegions) {
400 auto cb_node = GetCBState(commandBuffer);
401 auto src_image_state = GetImageState(srcImage);
402 auto dst_image_state = GetImageState(dstImage);
403
404 // Update bindings between images and cmd buffer
405 AddCommandBufferBindingImage(cb_node, src_image_state);
406 AddCommandBufferBindingImage(cb_node, dst_image_state);
407}
408
409void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
410 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
411 uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
412 auto cb_node = GetCBState(commandBuffer);
413 auto src_image_state = GetImageState(srcImage);
414 auto dst_image_state = GetImageState(dstImage);
415
416 // Update bindings between images and cmd buffer
417 AddCommandBufferBindingImage(cb_node, src_image_state);
418 AddCommandBufferBindingImage(cb_node, dst_image_state);
419}
420
421void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
422 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
423 VkResult result) {
424 if (result != VK_SUCCESS) return;
425 // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500426 auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -0600427
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700428 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
429 RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
430 }
locke-lunargd556cc32019-09-17 01:21:23 -0600431 // Get a set of requirements in the case the app does not
sfricke-samsungad90e722020-07-08 20:54:24 -0700432 DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);
locke-lunargd556cc32019-09-17 01:21:23 -0600433
sfricke-samsungedce77a2020-07-03 22:35:13 -0700434 buffer_state->unprotected = ((pCreateInfo->flags & VK_BUFFER_CREATE_PROTECTED_BIT) == 0);
435
locke-lunargd556cc32019-09-17 01:21:23 -0600436 bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
437}
438
439void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
440 const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
441 VkResult result) {
442 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500443 auto buffer_state = GetBufferShared(pCreateInfo->buffer);
444 bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -0600445}
446
447void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
448 const VkAllocationCallbacks *pAllocator, VkImageView *pView,
449 VkResult result) {
450 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500451 auto image_state = GetImageShared(pCreateInfo->image);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700452 auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
453
454 // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
455 const VkImageTiling image_tiling = image_state->createInfo.tiling;
456 const VkFormat image_view_format = pCreateInfo->format;
457 if (image_state->has_ahb_format == true) {
458 // The ImageView uses same Image's format feature since they share same AHB
459 image_view_state->format_features = image_state->format_features;
460 } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
461 // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
462 assert(device_extensions.vk_ext_image_drm_format_modifier);
463 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
464 nullptr};
465 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);
466
467 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
468 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
469 nullptr};
470 format_properties_2.pNext = (void *)&drm_properties_list;
471 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
472
473 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300474 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700475 image_view_state->format_features |=
476 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300477 break;
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700478 }
479 }
480 } else {
481 VkFormatProperties format_properties;
482 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
483 image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
484 : format_properties.optimalTilingFeatures;
485 }
486
487 imageViewMap.insert(std::make_pair(*pView, std::move(image_view_state)));
locke-lunargd556cc32019-09-17 01:21:23 -0600488}
489
490void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
491 uint32_t regionCount, const VkBufferCopy *pRegions) {
492 auto cb_node = GetCBState(commandBuffer);
493 auto src_buffer_state = GetBufferState(srcBuffer);
494 auto dst_buffer_state = GetBufferState(dstBuffer);
495
496 // Update bindings between buffers and cmd buffer
497 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
498 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
499}
500
501void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
502 const VkAllocationCallbacks *pAllocator) {
503 IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
504 if (!image_view_state) return;
505 const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
506
507 // Any bound cmd buffers are now invalid
508 InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500509 image_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600510 imageViewMap.erase(imageView);
511}
512
513void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
514 if (!buffer) return;
515 auto buffer_state = GetBufferState(buffer);
516 const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
517
518 InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
519 for (auto mem_binding : buffer_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700520 RemoveBufferMemoryRange(buffer, mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -0600521 }
522 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500523 buffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600524 bufferMap.erase(buffer_state->buffer);
525}
526
527void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
528 const VkAllocationCallbacks *pAllocator) {
529 if (!bufferView) return;
530 auto buffer_view_state = GetBufferViewState(bufferView);
531 const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
532
533 // Any bound cmd buffers are now invalid
534 InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500535 buffer_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600536 bufferViewMap.erase(bufferView);
537}
538
539void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
540 VkDeviceSize size, uint32_t data) {
541 auto cb_node = GetCBState(commandBuffer);
542 auto buffer_state = GetBufferState(dstBuffer);
543 // Update bindings between buffer and cmd buffer
544 AddCommandBufferBindingBuffer(cb_node, buffer_state);
545}
546
547void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
548 VkImageLayout srcImageLayout, VkBuffer dstBuffer,
549 uint32_t regionCount, const VkBufferImageCopy *pRegions) {
550 auto cb_node = GetCBState(commandBuffer);
551 auto src_image_state = GetImageState(srcImage);
552 auto dst_buffer_state = GetBufferState(dstBuffer);
553
554 // Update bindings between buffer/image and cmd buffer
555 AddCommandBufferBindingImage(cb_node, src_image_state);
556 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
557}
558
559void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
560 VkImageLayout dstImageLayout, uint32_t regionCount,
561 const VkBufferImageCopy *pRegions) {
562 auto cb_node = GetCBState(commandBuffer);
563 auto src_buffer_state = GetBufferState(srcBuffer);
564 auto dst_image_state = GetImageState(dstImage);
565
566 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
567 AddCommandBufferBindingImage(cb_node, dst_image_state);
568}
569
570// Get the image viewstate for a given framebuffer attachment
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300571IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(CMD_BUFFER_STATE *cb, FRAMEBUFFER_STATE *framebuffer,
572 uint32_t index) {
573 if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
574 assert(index < cb->imagelessFramebufferAttachments.size());
575 return cb->imagelessFramebufferAttachments[index];
576 }
locke-lunargd556cc32019-09-17 01:21:23 -0600577 assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
578 const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
579 return GetImageViewState(image_view);
580}
581
582// Get the image viewstate for a given framebuffer attachment
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300583const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const CMD_BUFFER_STATE *cb,
584 const FRAMEBUFFER_STATE *framebuffer,
locke-lunargd556cc32019-09-17 01:21:23 -0600585 uint32_t index) const {
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300586 if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
587 assert(index < cb->imagelessFramebufferAttachments.size());
588 return cb->imagelessFramebufferAttachments[index];
589 }
locke-lunargd556cc32019-09-17 01:21:23 -0600590 assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
591 const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
592 return GetImageViewState(image_view);
593}
594
595void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
locke-lunargd556cc32019-09-17 01:21:23 -0600596 std::unordered_set<VkImage> *bound_images = nullptr;
597
locke-lunargb3584732019-10-28 20:18:36 -0600598 if (image_state->bind_swapchain) {
599 auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -0600600 if (swapchain_state) {
locke-lunargb3584732019-10-28 20:18:36 -0600601 bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
locke-lunargd556cc32019-09-17 01:21:23 -0600602 }
603 } else {
locke-lunargcf04d582019-11-26 00:31:50 -0700604 if (image_state->binding.mem_state) {
605 bound_images = &image_state->binding.mem_state->bound_images;
locke-lunargd556cc32019-09-17 01:21:23 -0600606 }
607 }
608
609 if (bound_images) {
610 for (const auto &handle : *bound_images) {
611 if (handle != image_state->image) {
612 auto is = GetImageState(handle);
613 if (is && is->IsCompatibleAliasing(image_state)) {
614 auto inserted = is->aliasing_images.emplace(image_state->image);
615 if (inserted.second) {
616 image_state->aliasing_images.emplace(handle);
617 }
618 }
619 }
620 }
621 }
622}
623
624void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
625 for (const auto &image : image_state->aliasing_images) {
626 auto is = GetImageState(image);
627 if (is) {
628 is->aliasing_images.erase(image_state->image);
629 }
630 }
631 image_state->aliasing_images.clear();
632}
633
634void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
635 // This is one way clear. Because the bound_images include cross references, the one way clear loop could clear the whole
636 // reference. It doesn't need two ways clear.
637 for (const auto &handle : bound_images) {
638 auto is = GetImageState(handle);
639 if (is) {
640 is->aliasing_images.clear();
641 }
642 }
643}
644
Jeff Bolz310775c2019-10-09 00:46:33 -0500645const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
646 auto it = eventMap.find(event);
647 if (it == eventMap.end()) {
648 return nullptr;
649 }
650 return &it->second;
651}
652
locke-lunargd556cc32019-09-17 01:21:23 -0600653EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
654 auto it = eventMap.find(event);
655 if (it == eventMap.end()) {
656 return nullptr;
657 }
658 return &it->second;
659}
660
661const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
662 auto it = queueMap.find(queue);
663 if (it == queueMap.cend()) {
664 return nullptr;
665 }
666 return &it->second;
667}
668
669QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
670 auto it = queueMap.find(queue);
671 if (it == queueMap.end()) {
672 return nullptr;
673 }
674 return &it->second;
675}
676
677const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
678 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
679 auto it = phys_dev_map->find(phys);
680 if (it == phys_dev_map->end()) {
681 return nullptr;
682 }
683 return &it->second;
684}
685
686PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
687 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
688 auto it = phys_dev_map->find(phys);
689 if (it == phys_dev_map->end()) {
690 return nullptr;
691 }
692 return &it->second;
693}
694
// Accessor for the tracker's cached physical_device_state member.
PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
// Const accessor for the tracker's cached physical_device_state member.
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
697
698// Return ptr to memory binding for given handle of specified type
699template <typename State, typename Result>
700static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
701 switch (typed_handle.type) {
702 case kVulkanObjectTypeImage:
703 return state->GetImageState(typed_handle.Cast<VkImage>());
704 case kVulkanObjectTypeBuffer:
705 return state->GetBufferState(typed_handle.Cast<VkBuffer>());
706 case kVulkanObjectTypeAccelerationStructureNV:
707 return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
708 default:
709 break;
710 }
711 return nullptr;
712}
713
// Const wrapper: dispatch to the shared implementation with const result types.
const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}
717
// Mutable wrapper: dispatch to the shared implementation with non-const result types.
BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}
721
// Create and register DEVICE_MEMORY_STATE for a newly allocated VkDeviceMemory,
// capturing dedicated/export/import properties and multi-instance/protected flags
// from the allocate info's pNext chain and the physical-device memory properties.
void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    // Fake address gives each allocation a unique, stable range for binding bookkeeping
    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo, fake_address);
    auto mem_info = memObjMap[mem].get();

    // Dedicated allocation: remember which buffer/image this memory is dedicated to
    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }

    auto alloc_flags = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
    if (alloc_flags) {
        auto dev_mask = alloc_flags->deviceMask;
        // More than one bit set in deviceMask => allocation spans multiple device instances
        if ((dev_mask != 0) && (dev_mask & (dev_mask - 1))) {
            mem_info->multi_instance = true;
        }
    }
    // A multi-instance heap on a multi-GPU device also makes the allocation multi-instance
    auto heap_index = phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].heapIndex;
    mem_info->multi_instance |= (((phys_dev_mem_props.memoryHeaps[heap_index].flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT) != 0) &&
                                 physical_device_count > 1);

    // Assumes validation already for only a single import operation in the pNext
#ifdef VK_USE_PLATFORM_WIN32_KHR
    auto win32_import = lvl_find_in_chain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
    if (win32_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = win32_import->handleType;
    }
#endif
    auto fd_import = lvl_find_in_chain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
    if (fd_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = fd_import->handleType;
    }
    auto host_pointer_import = lvl_find_in_chain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
    if (host_pointer_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = host_pointer_import->handleType;
    }
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    // AHB Import doesn't have handle in the pNext struct
    // It should be assumed that all imported AHB can only have the same, single handleType
    auto ahb_import = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
    if ((ahb_import) && (ahb_import->buffer != nullptr)) {
        mem_info->is_import_ahb = true;
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
#endif

    // Protected memory cannot be mapped or accessed by unprotected operations
    const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    mem_info->unprotected = ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) == 0);
}
784
785// Create binding link between given sampler and command buffer node
786void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600787 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600788 return;
789 }
Jeff Bolzadbfa852019-10-04 13:53:30 -0500790 AddCommandBufferBinding(sampler_state->cb_bindings,
791 VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600792}
793
794// Create binding link between given image node and command buffer node
795void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600796 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600797 return;
798 }
799 // Skip validation if this image was created through WSI
800 if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
801 // First update cb binding for image
Jeff Bolzadbfa852019-10-04 13:53:30 -0500802 if (AddCommandBufferBinding(image_state->cb_bindings,
803 VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600804 // Now update CB binding in MemObj mini CB list
805 for (auto mem_binding : image_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700806 // Now update CBInfo's Mem reference list
807 AddCommandBufferBinding(mem_binding->cb_bindings,
808 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600809 }
810 }
811 }
812}
813
814// Create binding link between given image view node and its image with command buffer node
815void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600816 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600817 return;
818 }
819 // First add bindings for imageView
Jeff Bolzadbfa852019-10-04 13:53:30 -0500820 if (AddCommandBufferBinding(view_state->cb_bindings,
821 VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600822 // Only need to continue if this is a new item
Jeff Bolzadbfa852019-10-04 13:53:30 -0500823 auto image_state = view_state->image_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -0600824 // Add bindings for image within imageView
825 if (image_state) {
826 AddCommandBufferBindingImage(cb_node, image_state);
827 }
828 }
829}
830
831// Create binding link between given buffer node and command buffer node
832void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600833 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600834 return;
835 }
836 // First update cb binding for buffer
Jeff Bolzadbfa852019-10-04 13:53:30 -0500837 if (AddCommandBufferBinding(buffer_state->cb_bindings,
838 VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600839 // Now update CB binding in MemObj mini CB list
840 for (auto mem_binding : buffer_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700841 // Now update CBInfo's Mem reference list
842 AddCommandBufferBinding(mem_binding->cb_bindings,
843 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600844 }
845 }
846}
847
848// Create binding link between given buffer view node and its buffer with command buffer node
849void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600850 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600851 return;
852 }
853 // First add bindings for bufferView
Jeff Bolzadbfa852019-10-04 13:53:30 -0500854 if (AddCommandBufferBinding(view_state->cb_bindings,
855 VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
856 auto buffer_state = view_state->buffer_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -0600857 // Add bindings for buffer within bufferView
858 if (buffer_state) {
859 AddCommandBufferBindingBuffer(cb_node, buffer_state);
860 }
861 }
862}
863
864// Create binding link between given acceleration structure and command buffer node
865void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
866 ACCELERATION_STRUCTURE_STATE *as_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600867 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600868 return;
869 }
Jeff Bolzadbfa852019-10-04 13:53:30 -0500870 if (AddCommandBufferBinding(
871 as_state->cb_bindings,
872 VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600873 // Now update CB binding in MemObj mini CB list
874 for (auto mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700875 // Now update CBInfo's Mem reference list
876 AddCommandBufferBinding(mem_binding->cb_bindings,
877 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600878 }
879 }
880}
881
locke-lunargd556cc32019-09-17 01:21:23 -0600882// Clear a single object binding from given memory object
locke-lunarg5f59e782019-12-19 10:32:23 -0700883void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
locke-lunargd556cc32019-09-17 01:21:23 -0600884 // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
885 if (mem_info) {
886 mem_info->obj_bindings.erase(typed_handle);
887 }
888}
889
890// ClearMemoryObjectBindings clears the binding of objects to memory
891// For the given object it pulls the memory bindings and makes sure that the bindings
892// no longer refer to the object being cleared. This occurs when objects are destroyed.
893void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
894 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
895 if (mem_binding) {
896 if (!mem_binding->sparse) {
locke-lunarg5f59e782019-12-19 10:32:23 -0700897 ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -0600898 } else { // Sparse, clear all bindings
899 for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
locke-lunarg5f59e782019-12-19 10:32:23 -0700900 ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -0600901 }
902 }
903 }
904}
905
// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
// Records the offset/size of the binding, registers the object in the memory
// object's obj_bindings set, and refreshes the object's cached bound-memory set.
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        // mem_state may be null for an invalid/destroyed handle; in that case no binding is recorded
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            // Bound size is taken from the object's memory requirements, not the allocation size
            mem_binding->binding.size = mem_binding->requirements.size;
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}
933
934// For NULL mem case, clear any previous binding Else...
935// Make sure given object is in its object map
936// IF a previous binding existed, update binding
937// Add reference from objectInfo to memoryInfo
938// Add reference off of object's binding info
939// Return VK_TRUE if addition is successful, VK_FALSE otherwise
locke-lunargcf04d582019-11-26 00:31:50 -0700940bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
941 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
locke-lunargd556cc32019-09-17 01:21:23 -0600942 bool skip = VK_FALSE;
943 // Handle NULL case separately, just clear previous binding & decrement reference
locke-lunargcf04d582019-11-26 00:31:50 -0700944 if (mem == VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -0600945 // TODO : This should cause the range of the resource to be unbound according to spec
946 } else {
947 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
948 assert(mem_binding);
949 if (mem_binding) { // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
950 assert(mem_binding->sparse);
locke-lunargcf04d582019-11-26 00:31:50 -0700951 MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
952 if (binding.mem_state) {
953 binding.mem_state->obj_bindings.insert(typed_handle);
locke-lunargd556cc32019-09-17 01:21:23 -0600954 // Need to set mem binding for this object
955 mem_binding->sparse_bindings.insert(binding);
956 mem_binding->UpdateBoundMemorySet();
957 }
958 }
959 }
960 return skip;
961}
962
// Update descriptor-set binding bookkeeping for the pipeline bound at bind_point:
// binds each active descriptor set (and its used resources) to the command buffer,
// skipping sets whose validated state is unchanged since the last update.
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[setIndex].validated_set != descriptor_set ||
                state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                              state.per_set[setIndex].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                        state.per_set[setIndex].validated_set_binding_req_map.end(),
                                        std::inserter(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, delta_reqs);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, binding_req_map);
                }

                // Record what we just validated so future calls can skip redundant work
                state.per_set[setIndex].validated_set = descriptor_set;
                state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}
1032
1033// Remove set from setMap and delete the set
1034void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001035 descriptor_set->destroyed = true;
Jeff Bolzadbfa852019-10-04 13:53:30 -05001036 const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001037 // Any bound cmd buffers are now invalid
Jeff Bolzadbfa852019-10-04 13:53:30 -05001038 InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001039
locke-lunargd556cc32019-09-17 01:21:23 -06001040 setMap.erase(descriptor_set->GetSet());
1041}
1042
1043// Free all DS Pools including their Sets & related sub-structs
1044// NOTE : Calls to this function should be wrapped in mutex
1045void ValidationStateTracker::DeleteDescriptorSetPools() {
1046 for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
1047 // Remove this pools' sets from setMap and delete them
1048 for (auto ds : ii->second->sets) {
1049 FreeDescriptorSet(ds);
1050 }
1051 ii->second->sets.clear();
1052 ii = descriptorPoolMap.erase(ii);
1053 }
1054}
1055
// For given object struct return a ptr of BASE_NODE type for its wrapping struct.
// Fast path: a handle carrying a cached node pointer returns it directly;
// otherwise the pointer is looked up in the per-type state map.
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}
1140
sfricke-samsungbf1a2ed2020-06-14 23:31:00 -07001141// Gets union of all features defined by Potential Format Features
1142// except, does not handle the external format case for AHB as that only can be used for sampled images
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001143VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
1144 VkFormatFeatureFlags format_features = 0;
1145
1146 if (format != VK_FORMAT_UNDEFINED) {
1147 VkFormatProperties format_properties;
1148 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
1149 format_features |= format_properties.linearTilingFeatures;
1150 format_features |= format_properties.optimalTilingFeatures;
1151 if (device_extensions.vk_ext_image_drm_format_modifier) {
1152 // VK_KHR_get_physical_device_properties2 is required in this case
1153 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
1154 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
1155 nullptr};
1156 format_properties_2.pNext = (void *)&drm_properties_list;
1157 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
1158 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
1159 format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
1160 }
1161 }
1162 }
1163
1164 return format_features;
1165}
1166
locke-lunargd556cc32019-09-17 01:21:23 -06001167// Tie the VulkanTypedHandle to the cmd buffer which includes:
1168// Add object_binding to cmd buffer
1169// Add cb_binding to object
Jeff Bolzadbfa852019-10-04 13:53:30 -05001170bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
locke-lunargd556cc32019-09-17 01:21:23 -06001171 const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001172 if (disabled[command_buffer_state]) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001173 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001174 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001175 // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
1176 // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
1177 auto inserted = cb_bindings.insert({cb_node, -1});
1178 if (inserted.second) {
1179 cb_node->object_bindings.push_back(obj);
1180 inserted.first->second = (int)cb_node->object_bindings.size() - 1;
1181 return true;
1182 }
1183 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001184}
1185
1186// For a given object, if cb_node is in that objects cb_bindings, remove cb_node
1187void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
1188 BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
1189 if (base_obj) base_obj->cb_bindings.erase(cb_node);
1190}
1191
// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
// Called for vkResetCommandBuffer-style resets; also fires the optional
// command_buffer_reset_callback (even when no CMD_BUFFER_STATE is found for cb).
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        // Per-command-type "has been recorded" flags
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        // Dynamic-state tracking masks
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->viewportWithCountMask = 0;
        pCB->scissorMask = 0;
        pCB->scissorWithCountMask = 0;
        pCB->primitiveTopology = VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;

        // Clear last-bound pipeline/descriptor state for every bind point
        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        // Render pass / subpass state back to "no active render pass"
        pCB->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        // Event and query bookkeeping
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        // NOTE: must happen before linkedCommandBuffers is cleared below.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        // Deferred-validation lambdas recorded during command recording
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        // (unlinks this CB from each bound object's cb_bindings before the list is cleared)
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            framebuffer->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        // Queue-family-ownership-transfer barrier tracking
        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        pCB->small_indexed_draw_call_count = 0;

        pCB->transform_feedback_active = false;
    }
    // Notify any registered listener (e.g. other validation objects) of the reset;
    // invoked regardless of whether a state object was found for cb.
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}
1281
1282void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
1283 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
1284 VkResult result) {
1285 if (VK_SUCCESS != result) return;
1286
1287 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
1288 if (nullptr == enabled_features_found) {
1289 const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
1290 if (features2) {
1291 enabled_features_found = &(features2->features);
1292 }
1293 }
1294
1295 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
1296 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
1297 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
1298
1299 if (nullptr == enabled_features_found) {
1300 state_tracker->enabled_features.core = {};
1301 } else {
1302 state_tracker->enabled_features.core = *enabled_features_found;
1303 }
1304
1305 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
1306 // previously set them through an explicit API call.
1307 uint32_t count;
1308 auto pd_state = GetPhysicalDeviceState(gpu);
1309 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
1310 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
1311 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
1312 // Save local link to this device's physical device state
1313 state_tracker->physical_device_state = pd_state;
1314
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001315 const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
1316 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001317 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001318 } else {
sfricke-samsung27c70722020-05-02 08:42:39 -07001319 // Set Extension Feature Aliases to false as there is no struct to check
1320 state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
1321 state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
1322 state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
1323 state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
1324 state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
1325 state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
1326
1327 // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001328
1329 const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
1330 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001331 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1332 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1333 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1334 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001335 }
1336
1337 const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
1338 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001339 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1340 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001341 }
1342
1343 const auto *descriptor_indexing_features =
1344 lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
1345 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001346 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1347 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1348 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1349 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1350 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1351 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1352 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1353 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1354 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1355 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1356 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1357 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1358 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1359 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1360 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1361 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1362 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1363 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1364 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1365 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1366 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1367 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1368 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1369 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1370 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1371 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1372 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1373 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
1374 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
1375 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1376 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1377 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1378 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1379 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1380 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1381 descriptor_indexing_features->descriptorBindingPartiallyBound;
1382 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1383 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1384 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001385 }
1386
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001387 const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001388 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001389 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001390 }
1391
1392 const auto *imageless_framebuffer_features =
1393 lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
1394 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001395 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001396 }
1397
1398 const auto *uniform_buffer_standard_layout_features =
1399 lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
1400 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001401 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1402 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001403 }
1404
1405 const auto *subgroup_extended_types_features =
1406 lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
1407 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001408 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1409 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001410 }
1411
1412 const auto *separate_depth_stencil_layouts_features =
1413 lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
1414 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001415 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1416 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001417 }
1418
1419 const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
1420 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001421 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001422 }
1423
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001424 const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001425 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001426 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001427 }
1428
1429 const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
1430 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001431 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1432 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1433 buffer_device_address->bufferDeviceAddressCaptureReplay;
1434 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1435 buffer_device_address->bufferDeviceAddressMultiDevice;
1436 }
1437 }
1438
1439 const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
1440 if (vulkan_11_features) {
1441 state_tracker->enabled_features.core11 = *vulkan_11_features;
1442 } else {
1443 // These structs are only allowed in pNext chain if there is no kPhysicalDeviceVulkan11Features
1444
1445 const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
1446 if (sixteen_bit_storage_features) {
1447 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1448 sixteen_bit_storage_features->storageBuffer16BitAccess;
1449 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1450 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1451 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1452 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1453 }
1454
1455 const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
1456 if (multiview_features) {
1457 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1458 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1459 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1460 }
1461
1462 const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
1463 if (variable_pointers_features) {
1464 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1465 variable_pointers_features->variablePointersStorageBuffer;
1466 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1467 }
1468
1469 const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
1470 if (protected_memory_features) {
1471 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1472 }
1473
1474 const auto *ycbcr_conversion_features =
1475 lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
1476 if (ycbcr_conversion_features) {
1477 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1478 }
1479
1480 const auto *shader_draw_parameters_features =
1481 lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
1482 if (shader_draw_parameters_features) {
1483 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001484 }
1485 }
1486
locke-lunargd556cc32019-09-17 01:21:23 -06001487 const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001488 if (device_group_ci) {
1489 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1490 state_tracker->device_group_create_info = *device_group_ci;
1491 } else {
1492 state_tracker->physical_device_count = 1;
1493 }
locke-lunargd556cc32019-09-17 01:21:23 -06001494
locke-lunargd556cc32019-09-17 01:21:23 -06001495 const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
1496 if (exclusive_scissor_features) {
1497 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1498 }
1499
1500 const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
1501 if (shading_rate_image_features) {
1502 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1503 }
1504
1505 const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
1506 if (mesh_shader_features) {
1507 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1508 }
1509
1510 const auto *inline_uniform_block_features =
1511 lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
1512 if (inline_uniform_block_features) {
1513 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1514 }
1515
1516 const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
1517 if (transform_feedback_features) {
1518 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1519 }
1520
locke-lunargd556cc32019-09-17 01:21:23 -06001521 const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
1522 if (vtx_attrib_div_features) {
1523 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1524 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001525
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001526 const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
1527 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001528 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001529 }
1530
1531 const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
1532 if (cooperative_matrix_features) {
1533 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1534 }
1535
locke-lunargd556cc32019-09-17 01:21:23 -06001536 const auto *compute_shader_derivatives_features =
1537 lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
1538 if (compute_shader_derivatives_features) {
1539 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1540 }
1541
1542 const auto *fragment_shader_barycentric_features =
1543 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
1544 if (fragment_shader_barycentric_features) {
1545 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1546 }
1547
1548 const auto *shader_image_footprint_features =
1549 lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
1550 if (shader_image_footprint_features) {
1551 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1552 }
1553
1554 const auto *fragment_shader_interlock_features =
1555 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
1556 if (fragment_shader_interlock_features) {
1557 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1558 }
1559
1560 const auto *demote_to_helper_invocation_features =
1561 lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
1562 if (demote_to_helper_invocation_features) {
1563 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1564 }
1565
1566 const auto *texel_buffer_alignment_features =
1567 lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
1568 if (texel_buffer_alignment_features) {
1569 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1570 }
1571
locke-lunargd556cc32019-09-17 01:21:23 -06001572 const auto *pipeline_exe_props_features =
1573 lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
1574 if (pipeline_exe_props_features) {
1575 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1576 }
1577
Jeff Bolz82f854d2019-09-17 14:56:47 -05001578 const auto *dedicated_allocation_image_aliasing_features =
1579 lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
1580 if (dedicated_allocation_image_aliasing_features) {
1581 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1582 *dedicated_allocation_image_aliasing_features;
1583 }
1584
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001585 const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
1586 if (performance_query_features) {
1587 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1588 }
1589
Tobias Hector782bcde2019-11-28 16:19:42 +00001590 const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
1591 if (device_coherent_memory_features) {
1592 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1593 }
1594
sfricke-samsungcead0802020-01-30 22:20:10 -08001595 const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
1596 if (ycbcr_image_array_features) {
1597 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1598 }
1599
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001600 const auto *ray_tracing_features = lvl_find_in_chain<VkPhysicalDeviceRayTracingFeaturesKHR>(pCreateInfo->pNext);
1601 if (ray_tracing_features) {
1602 state_tracker->enabled_features.ray_tracing_features = *ray_tracing_features;
1603 }
1604
Jeff Bolz165818a2020-05-08 11:19:03 -05001605 const auto *robustness2_features = lvl_find_in_chain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
1606 if (robustness2_features) {
1607 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1608 }
1609
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001610 const auto *fragment_density_map_features =
1611 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
1612 if (fragment_density_map_features) {
1613 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1614 }
1615
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001616 const auto *fragment_density_map_features2 =
1617 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
1618 if (fragment_density_map_features2) {
1619 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1620 }
1621
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001622 const auto *astc_decode_features = lvl_find_in_chain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
1623 if (astc_decode_features) {
1624 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1625 }
1626
Tony-LunarG7337b312020-04-15 16:40:25 -06001627 const auto *custom_border_color_features = lvl_find_in_chain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
1628 if (custom_border_color_features) {
1629 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1630 }
1631
sfricke-samsungfd661d62020-05-16 00:57:27 -07001632 const auto *pipeline_creation_cache_control_features =
1633 lvl_find_in_chain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
1634 if (pipeline_creation_cache_control_features) {
1635 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1636 }
1637
Piers Daniell39842ee2020-07-10 16:42:33 -06001638 const auto *extended_dynamic_state_features =
1639 lvl_find_in_chain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1640 if (extended_dynamic_state_features) {
1641 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1642 }
1643
locke-lunargd556cc32019-09-17 01:21:23 -06001644 // Store physical device properties and physical device mem limits into CoreChecks structs
1645 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1646 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001647 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1648 &state_tracker->phys_dev_props_core11);
1649 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1650 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001651
1652 const auto &dev_ext = state_tracker->device_extensions;
1653 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1654
1655 if (dev_ext.vk_khr_push_descriptor) {
1656 // Get the needed push_descriptor limits
1657 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1658 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1659 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1660 }
1661
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001662 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
1663 VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
1664 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1665 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1666 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1667 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1668 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1669 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1670 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1671 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1672 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1673 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1674 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1675 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1676 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1677 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1678 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1679 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1680 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1681 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1682 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1683 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1684 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1685 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1686 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1687 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1688 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1689 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1690 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1691 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1692 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1693 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1694 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1695 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1696 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1697 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1698 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1699 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1700 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1701 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1702 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1703 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
1704 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
1705 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
1706 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
1707 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
1708 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
1709 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
1710 }
1711
locke-lunargd556cc32019-09-17 01:21:23 -06001712 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1713 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1714 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1715 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001716
1717 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
1718 VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
1719 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
1720 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
1721 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
1722 depth_stencil_resolve_props.supportedStencilResolveModes;
1723 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
1724 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
1725 }
1726
locke-lunargd556cc32019-09-17 01:21:23 -06001727 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001728 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
1729 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing, &phys_dev_props->ray_tracing_propsKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06001730 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1731 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001732 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001733 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07001734 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06001735 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001736
1737 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
1738 VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
1739 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
1740 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
1741 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
1742 }
1743
1744 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
1745 VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
1746 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
1747 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
1748 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
1749 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
1750 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
1751 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
1752 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
1753 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
1754 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
1755 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
1756 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
1757 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
1758 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
1759 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
1760 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
1761 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
1762 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
1763 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
1764 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
1765 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
1766 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
1767 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001768
locke-lunargd556cc32019-09-17 01:21:23 -06001769 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1770 // Get the needed cooperative_matrix properties
1771 auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1772 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
1773 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1774 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1775
1776 uint32_t numCooperativeMatrixProperties = 0;
1777 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
1778 state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
1779 lvl_init_struct<VkCooperativeMatrixPropertiesNV>());
1780
1781 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
1782 state_tracker->cooperative_matrix_properties.data());
1783 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001784 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06001785 // Get the needed subgroup limits
1786 auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
1787 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
1788 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1789
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001790 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
1791 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
1792 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
1793 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06001794 }
1795
1796 // Store queue family data
1797 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1798 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001799 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06001800 state_tracker->queue_family_index_map.insert(
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001801 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.queueCount));
1802 state_tracker->queue_family_create_flags_map.insert(
1803 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.flags));
locke-lunargd556cc32019-09-17 01:21:23 -06001804 }
1805 }
1806}
1807
// Tear down all device-scoped validation state before the driver destroys the device.
// Order matters: command buffers are reset first so their object_bindings are unlinked
// before the maps holding the bound state objects are cleared.
void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}
1831
1832// Loop through bound objects and increment their in_use counts.
1833void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
1834 for (auto obj : cb_node->object_bindings) {
1835 auto base_obj = GetStateStructPtrFromObject(obj);
1836 if (base_obj) {
1837 base_obj->in_use.fetch_add(1);
1838 }
1839 }
1840}
1841
1842// Track which resources are in-flight by atomically incrementing their "in_use" count
1843void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1844 cb_node->submitCount++;
1845 cb_node->in_use.fetch_add(1);
1846
1847 // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
1848 IncrementBoundObjects(cb_node);
1849 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
1850 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
1851 // should then be flagged prior to calling this function
1852 for (auto event : cb_node->writeEventsBeforeWait) {
1853 auto event_state = GetEventState(event);
1854 if (event_state) event_state->write_in_use++;
1855 }
1856}
1857
1858// Decrement in-use count for objects bound to command buffer
1859void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
1860 BASE_NODE *base_obj = nullptr;
1861 for (auto obj : cb_node->object_bindings) {
1862 base_obj = GetStateStructPtrFromObject(obj);
1863 if (base_obj) {
1864 base_obj->in_use.fetch_sub(1);
1865 }
1866 }
1867}
1868
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001869void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
locke-lunargd556cc32019-09-17 01:21:23 -06001870 std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;
1871
1872 // Roll this queue forward, one submission at a time.
1873 while (pQueue->seq < seq) {
1874 auto &submission = pQueue->submissions.front();
1875
1876 for (auto &wait : submission.waitSemaphores) {
1877 auto pSemaphore = GetSemaphoreState(wait.semaphore);
1878 if (pSemaphore) {
1879 pSemaphore->in_use.fetch_sub(1);
1880 }
1881 auto &lastSeq = otherQueueSeqs[wait.queue];
1882 lastSeq = std::max(lastSeq, wait.seq);
1883 }
1884
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001885 for (auto &signal : submission.signalSemaphores) {
1886 auto pSemaphore = GetSemaphoreState(signal.semaphore);
locke-lunargd556cc32019-09-17 01:21:23 -06001887 if (pSemaphore) {
1888 pSemaphore->in_use.fetch_sub(1);
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001889 if (pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && pSemaphore->payload < signal.payload) {
1890 pSemaphore->payload = signal.payload;
1891 }
locke-lunargd556cc32019-09-17 01:21:23 -06001892 }
1893 }
1894
1895 for (auto &semaphore : submission.externalSemaphores) {
1896 auto pSemaphore = GetSemaphoreState(semaphore);
1897 if (pSemaphore) {
1898 pSemaphore->in_use.fetch_sub(1);
1899 }
1900 }
1901
1902 for (auto cb : submission.cbs) {
1903 auto cb_node = GetCBState(cb);
1904 if (!cb_node) {
1905 continue;
1906 }
1907 // First perform decrement on general case bound objects
1908 DecrementBoundResources(cb_node);
1909 for (auto event : cb_node->writeEventsBeforeWait) {
1910 auto eventNode = eventMap.find(event);
1911 if (eventNode != eventMap.end()) {
1912 eventNode->second.write_in_use--;
1913 }
1914 }
Jeff Bolz310775c2019-10-09 00:46:33 -05001915 QueryMap localQueryToStateMap;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001916 VkQueryPool first_pool = VK_NULL_HANDLE;
Jeff Bolz310775c2019-10-09 00:46:33 -05001917 for (auto &function : cb_node->queryUpdates) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001918 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &localQueryToStateMap);
Jeff Bolz310775c2019-10-09 00:46:33 -05001919 }
1920
1921 for (auto queryStatePair : localQueryToStateMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001922 if (queryStatePair.second == QUERYSTATE_ENDED) {
1923 queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
1924 }
locke-lunargd556cc32019-09-17 01:21:23 -06001925 }
locke-lunargd556cc32019-09-17 01:21:23 -06001926 cb_node->in_use.fetch_sub(1);
1927 }
1928
1929 auto pFence = GetFenceState(submission.fence);
1930 if (pFence && pFence->scope == kSyncScopeInternal) {
1931 pFence->state = FENCE_RETIRED;
1932 }
1933
1934 pQueue->submissions.pop_front();
1935 pQueue->seq++;
1936 }
1937
1938 // Roll other queues forward to the highest seq we saw a wait for
1939 for (auto qs : otherQueueSeqs) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001940 RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001941 }
1942}
1943
1944// Submit a fence to a queue, delimiting previous fences and previous untracked
1945// work by it.
1946static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1947 pFence->state = FENCE_INFLIGHT;
1948 pFence->signaler.first = pQueue->queue;
1949 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1950}
1951
// Record the state effects of a successful vkQueueSubmit: build SUBMISSION entries for the
// queue (command buffers, semaphore waits/signals, fence), bump in-use counts, and replay
// each command buffer's deferred query/event updates into the global state maps.
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        vector<SEMAPHORE_WAIT> semaphore_waits;
        vector<SEMAPHORE_SIGNAL> semaphore_signals;
        vector<VkSemaphore> semaphore_externals;
        // Sequence number this submission will occupy once pushed onto the queue.
        const uint64_t next_seq = pQueue->seq + pQueue->submissions.size() + 1;
        // NOTE(review): assumed non-null whenever a timeline semaphore appears in the submit,
        // per valid usage; the timeline branches below dereference it unchecked — confirm.
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_WAIT wait;
                    wait.semaphore = semaphore;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        // Binary wait consumes the pending signal (if any) and unsignals.
                        if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                            wait.queue = pSemaphore->signaler.first;
                            wait.seq = pSemaphore->signaler.second;
                            semaphore_waits.push_back(wait);
                            pSemaphore->in_use.fetch_add(1);
                        }
                        pSemaphore->signaler.first = VK_NULL_HANDLE;
                        pSemaphore->signaled = false;
                    } else if (pSemaphore->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
                        // Timeline wait only blocks if the requested value hasn't been reached yet.
                        wait.queue = queue;
                        wait.seq = next_seq;
                        wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
                        semaphore_waits.push_back(wait);
                        pSemaphore->in_use.fetch_add(1);
                    }
                } else {
                    // Externally scoped semaphore: temporary import reverts to internal after one wait.
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = next_seq;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = next_seq;
                        pSemaphore->signaled = true;
                    } else {
                        signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, next_seq);
                }
            }
        }
        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                // Executed secondaries are tracked (and counted in-flight) alongside the primary.
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                // Replay the record-time query/event update lambdas (validation disabled) and
                // merge their results into the device-global maps.
                VkQueryPool first_pool = VK_NULL_HANDLE;
                EventToStageMap localEventToStageMap;
                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, first_pool, perf_pass, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        // Only the last submission of the batch carries the fence.
        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, perf_pass);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
2082
2083void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
2084 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
2085 VkResult result) {
2086 if (VK_SUCCESS == result) {
2087 AddMemObjInfo(device, *pMemory, pAllocateInfo);
2088 }
2089 return;
2090}
2091
// Record the state effects of vkFreeMemory: unbind every object bound to the allocation,
// invalidate command buffers that reference it, and erase the tracker's memory object.
void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            // Iterator is advanced before a possible erase so the loop survives removal.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    // Return the allocation's synthetic GPU-address range to the fake-address allocator.
    fake_memory.Free(mem_info->fake_base_address);
    memObjMap.erase(mem);
}
2139
// Record the state effects of a successful vkQueueBindSparse: apply each sparse memory
// binding to its buffer/image, then enqueue a command-buffer-less SUBMISSION carrying the
// batch's semaphore waits/signals and (on the last batch) the fence.
void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        // Semaphore bookkeeping mirrors PostCallRecordQueueSubmit (binary semaphores only here).
        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);

                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = pSemaphore->signaler.second;
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        // Only the last batch carries the fence; sparse binds contribute no command buffers.
        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
2240
2241void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
2242 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
2243 VkResult result) {
2244 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002245 auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002246 semaphore_state->signaler.first = VK_NULL_HANDLE;
2247 semaphore_state->signaler.second = 0;
2248 semaphore_state->signaled = false;
2249 semaphore_state->scope = kSyncScopeInternal;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002250 semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
2251 semaphore_state->payload = 0;
2252 auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
2253 if (semaphore_type_create_info) {
2254 semaphore_state->type = semaphore_type_create_info->semaphoreType;
2255 semaphore_state->payload = semaphore_type_create_info->initialValue;
2256 }
locke-lunargd556cc32019-09-17 01:21:23 -06002257 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
2258}
2259
2260void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
2261 VkSemaphoreImportFlagsKHR flags) {
2262 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
2263 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
2264 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
2265 sema_node->scope == kSyncScopeInternal) {
2266 sema_node->scope = kSyncScopeExternalTemporary;
2267 } else {
2268 sema_node->scope = kSyncScopeExternalPermanent;
2269 }
2270 }
2271}
2272
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002273void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
2274 VkResult result) {
2275 auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
2276 pSemaphore->payload = pSignalInfo->value;
2277}
2278
locke-lunargd556cc32019-09-17 01:21:23 -06002279void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
2280 auto mem_info = GetDevMemState(mem);
2281 if (mem_info) {
2282 mem_info->mapped_range.offset = offset;
2283 mem_info->mapped_range.size = size;
2284 mem_info->p_driver_data = *ppData;
2285 }
2286}
2287
2288void ValidationStateTracker::RetireFence(VkFence fence) {
2289 auto pFence = GetFenceState(fence);
2290 if (pFence && pFence->scope == kSyncScopeInternal) {
2291 if (pFence->signaler.first != VK_NULL_HANDLE) {
2292 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002293 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002294 } else {
2295 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
2296 // the fence as retired.
2297 pFence->state = FENCE_RETIRED;
2298 }
2299 }
2300}
2301
void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                         VkBool32 waitAll, uint64_t timeout, VkResult result) {
    if (result != VK_SUCCESS) return;

    // We can only retire here when the successful wait proves *every* listed fence completed:
    // either waitAll was requested, or there was just one fence.
    // Otherwise the app must call vkGetFenceStatus() per fence, and retirement happens there.
    const bool all_complete = (waitAll == VK_TRUE) || (fenceCount == 1);
    if (!all_complete) return;

    for (uint32_t i = 0; i < fenceCount; ++i) {
        RetireFence(pFences[i]);
    }
}
2316
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002317void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
2318 auto pSemaphore = GetSemaphoreState(semaphore);
2319 if (pSemaphore) {
2320 for (auto &pair : queueMap) {
2321 QUEUE_STATE &queueState = pair.second;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002322 uint64_t max_seq = 0;
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002323 for (const auto &submission : queueState.submissions) {
2324 for (const auto &signalSemaphore : submission.signalSemaphores) {
2325 if (signalSemaphore.semaphore == semaphore && signalSemaphore.payload <= until_payload) {
Tony-LunarG47d5e272020-04-07 15:35:55 -06002326 if (signalSemaphore.seq > max_seq) {
2327 max_seq = signalSemaphore.seq;
2328 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002329 }
2330 }
2331 }
Tony-LunarG47d5e272020-04-07 15:35:55 -06002332 if (max_seq) {
2333 RetireWorkOnQueue(&queueState, max_seq);
2334 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002335 }
2336 }
2337}
2338
void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                  VkResult result) {
    // A successful vkWaitSemaphores proves each listed timeline semaphore reached its requested value.
    if (result != VK_SUCCESS) return;
    for (uint32_t idx = 0; idx < pWaitInfo->semaphoreCount; ++idx) {
        RetireTimelineSemaphore(pWaitInfo->pSemaphores[idx], pWaitInfo->pValues[idx]);
    }
}
2347
// Core (Vulkan 1.2) entry point; state recording is shared with the KHR alias.
void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                          VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}
2352
// VK_KHR_timeline_semaphore entry point; state recording is shared with the core alias.
void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
                                                             uint64_t timeout, VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}
2357
// VK_SUCCESS from vkGetFenceStatus means the fence is signaled, so retire it and its queue work.
void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
    if (VK_SUCCESS != result) return;
    RetireFence(fence);
}
2362
void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
    // Start tracking this queue, but only the first time the app retrieves it.
    const bool first_seen = queues.emplace(queue).second;
    if (first_seen) {
        QUEUE_STATE &queue_state = queueMap[queue];
        queue_state.queue = queue;
        queue_state.queueFamilyIndex = queue_family_index;
        queue_state.seq = 0;
    }
}
2373
// Forward the retrieved queue handle to the shared queue-tracking helper.
void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}
2378
// vkGetDeviceQueue2 variant: the family index comes from the info struct instead of a bare parameter.
void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}
2382
void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
    // An idle queue has completed everything submitted to it, so retire all pending submissions.
    if (result != VK_SUCCESS) return;
    QUEUE_STATE *queue_state = GetQueueState(queue);
    const uint64_t final_seq = queue_state->seq + queue_state->submissions.size();
    RetireWorkOnQueue(queue_state, final_seq);
}
2388
2389void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2390 if (VK_SUCCESS != result) return;
2391 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002392 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002393 }
2394}
2395
2396void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2397 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002398 auto fence_state = GetFenceState(fence);
2399 fence_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002400 fenceMap.erase(fence);
2401}
2402
2403void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2404 const VkAllocationCallbacks *pAllocator) {
2405 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002406 auto semaphore_state = GetSemaphoreState(semaphore);
2407 semaphore_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002408 semaphoreMap.erase(semaphore);
2409}
2410
2411void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2412 if (!event) return;
2413 EVENT_STATE *event_state = GetEventState(event);
2414 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
2415 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
2416 eventMap.erase(event);
2417}
2418
2419void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2420 const VkAllocationCallbacks *pAllocator) {
2421 if (!queryPool) return;
2422 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
2423 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
2424 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002425 qp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002426 queryPoolMap.erase(queryPool);
2427}
2428
// Object with given handle is being bound to memory w/ given mem_info struct.
// Track the newly bound memory range with given memoryOffset
// Also scan any previous ranges, track aliased ranges with new range, and flag an error if a linear
// and non-linear range incorrectly overlap.
void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
                                               VkDeviceSize memoryOffset) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
            break;
        case kVulkanObjectTypeBuffer:
            mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
            break;
        case kVulkanObjectTypeAccelerationStructureNV:
            mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
            break;
        default:
            // Unsupported object type
            assert(false);
    }
}
2446
// Convenience wrapper: record that this image is bound into mem_info at mem_offset.
void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
}
2450
// Convenience wrapper: record that this buffer is bound into mem_info at mem_offset.
void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
}
2454
// Convenience wrapper: record that this NV acceleration structure is bound into mem_info at mem_offset.
void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
                                                                    VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
}
2459
// This function will remove the handle-to-index mapping from the appropriate map.
static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
            break;
        case kVulkanObjectTypeBuffer:
            mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
            break;
        case kVulkanObjectTypeAccelerationStructureNV:
            mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
            break;
        default:
            // Unsupported object type
            assert(false);
    }
}
2473
// Convenience wrapper: forget this buffer's binding into mem_info.
void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
}
2477
// Convenience wrapper: forget this image's binding into mem_info.
void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
}
2481
// Convenience wrapper: forget this NV acceleration structure's binding into mem_info.
void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
}
2485
2486void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2487 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2488 if (buffer_state) {
2489 // Track bound memory range information
2490 auto mem_info = GetDevMemState(mem);
2491 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002492 InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06002493 }
2494 // Track objects tied to memory
2495 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
2496 }
2497}
2498
// Record a successful vkBindBufferMemory in the tracker.
void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}
2504
void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
                                                             const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    // Record every binding in the batch.
    // NOTE(review): result is deliberately not checked here (matches the KHR alias) — confirm intent.
    for (uint32_t idx = 0; idx < bindInfoCount; ++idx) {
        const auto &info = pBindInfos[idx];
        UpdateBindBufferMemoryState(info.buffer, info.memory, info.memoryOffset);
    }
}
2511
void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                                const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    // KHR alias of PostCallRecordBindBufferMemory2: record every binding in the batch.
    for (uint32_t idx = 0; idx < bindInfoCount; ++idx) {
        const auto &info = pBindInfos[idx];
        UpdateBindBufferMemoryState(info.buffer, info.memory, info.memoryOffset);
    }
}
2518
Spencer Fricke6c127102020-04-16 06:25:20 -07002519void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002520 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2521 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002522 buffer_state->memory_requirements_checked = true;
2523 }
2524}
2525
// Core (Vulkan 1.0) entry point; shares the requirements-queried bookkeeping helper.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer);
}
2530
// Vulkan 1.1 entry point; the buffer handle comes from the info struct.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                        VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}
2536
// KHR alias of the Vulkan 1.1 entry point above.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                           VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}
2542
Spencer Fricke6c127102020-04-16 06:25:20 -07002543void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002544 const VkImagePlaneMemoryRequirementsInfo *plane_info =
2545 (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002546 IMAGE_STATE *image_state = GetImageState(image);
2547 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002548 if (plane_info != nullptr) {
2549 // Multi-plane image
2550 image_state->memory_requirements_checked = false; // Each image plane needs to be checked itself
2551 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
2552 image_state->plane0_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002553 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
2554 image_state->plane1_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002555 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
2556 image_state->plane2_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002557 }
2558 } else {
2559 // Single Plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002560 image_state->memory_requirements_checked = true;
2561 }
locke-lunargd556cc32019-09-17 01:21:23 -06002562 }
2563}
2564
// Core (Vulkan 1.0) entry point; no info struct, so no plane information is available.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(image, nullptr);
}
2569
// Vulkan 1.1 entry point; forwards pInfo so per-plane queries can be tracked.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}
2574
// KHR alias of the Vulkan 1.1 entry point above.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}
2580
2581static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2582 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2583 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2584 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2585 image_state->sparse_metadata_required = true;
2586 }
2587}
2588
void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
    VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    auto image_state = GetImageState(image);
    image_state->get_sparse_reqs_called = true;
    if (pSparseMemoryRequirements == nullptr) return;  // count-only query; nothing to record
    const uint32_t req_count = *pSparseMemoryRequirementCount;
    for (uint32_t idx = 0; idx < req_count; ++idx) {
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[idx]);
    }
}
2599
2600void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
2601 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2602 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2603 auto image_state = GetImageState(pInfo->image);
2604 image_state->get_sparse_reqs_called = true;
2605 if (!pSparseMemoryRequirements) return;
2606 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2607 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2608 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2609 }
2610}
2611
2612void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
2613 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2614 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2615 auto image_state = GetImageState(pInfo->image);
2616 image_state->get_sparse_reqs_called = true;
2617 if (!pSparseMemoryRequirements) return;
2618 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2619 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2620 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2621 }
2622}
2623
2624void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2625 const VkAllocationCallbacks *pAllocator) {
2626 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002627 auto shader_module_state = GetShaderModuleState(shaderModule);
2628 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002629 shaderModuleMap.erase(shaderModule);
2630}
2631
2632void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2633 const VkAllocationCallbacks *pAllocator) {
2634 if (!pipeline) return;
2635 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
2636 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
2637 // Any bound cmd buffers are now invalid
2638 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002639 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002640 pipelineMap.erase(pipeline);
2641}
2642
2643void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2644 const VkAllocationCallbacks *pAllocator) {
2645 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002646 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
2647 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002648 pipelineLayoutMap.erase(pipelineLayout);
2649}
2650
2651void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2652 const VkAllocationCallbacks *pAllocator) {
2653 if (!sampler) return;
2654 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
2655 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
2656 // Any bound cmd buffers are now invalid
2657 if (sampler_state) {
2658 InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
Yuly Novikov424cdd52020-05-26 16:45:12 -04002659
2660 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2661 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
2662 custom_border_color_sampler_count--;
2663 }
2664
2665 sampler_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002666 }
2667 samplerMap.erase(sampler);
2668}
2669
2670void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2671 const VkAllocationCallbacks *pAllocator) {
2672 if (!descriptorSetLayout) return;
2673 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2674 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002675 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002676 descriptorSetLayoutMap.erase(layout_it);
2677 }
2678}
2679
2680void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2681 const VkAllocationCallbacks *pAllocator) {
2682 if (!descriptorPool) return;
2683 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
2684 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
2685 if (desc_pool_state) {
2686 // Any bound cmd buffers are now invalid
2687 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
2688 // Free sets that were in this pool
2689 for (auto ds : desc_pool_state->sets) {
2690 FreeDescriptorSet(ds);
2691 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002692 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002693 descriptorPoolMap.erase(descriptorPool);
2694 }
2695}
2696
// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        // Allow any derived class to clean up command buffer state
        if (command_buffer_free_callback) {
            (*command_buffer_free_callback)(command_buffers[i]);
        }

        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map
            // (destroyed is set first so any outstanding references can detect the stale state)
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}
2722
void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
                                                             uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
    // Delegate to the shared helper that unlinks and deletes each command buffer's state.
    auto pool_state = GetCommandPoolState(commandPool);
    FreeCommandBufferStates(pool_state, commandBufferCount, pCommandBuffers);
}
2728
2729void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2730 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2731 VkResult result) {
2732 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002733 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002734 cmd_pool_state->createFlags = pCreateInfo->flags;
2735 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07002736 cmd_pool_state->unprotected = ((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) == 0);
locke-lunargd556cc32019-09-17 01:21:23 -06002737 commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
2738}
2739
2740void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2741 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2742 VkResult result) {
2743 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002744 auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002745 query_pool_state->createInfo = *pCreateInfo;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002746 query_pool_state->pool = *pQueryPool;
2747 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
2748 const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
2749 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
2750
2751 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2752 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2753 switch (counter.scope) {
2754 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
2755 query_pool_state->has_perf_scope_command_buffer = true;
2756 break;
2757 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
2758 query_pool_state->has_perf_scope_render_pass = true;
2759 break;
2760 default:
2761 break;
2762 }
2763 }
2764
2765 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
2766 &query_pool_state->n_performance_passes);
2767 }
2768
locke-lunargd556cc32019-09-17 01:21:23 -06002769 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
2770
2771 QueryObject query_obj{*pQueryPool, 0u};
2772 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2773 query_obj.query = i;
2774 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2775 }
2776}
2777
void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!commandPool) return;
    COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
    if (!cp_state) return;
    // "When a pool is destroyed, all command buffers allocated from the pool are freed."
    // Snapshot the handles first: FreeCommandBufferStates erases from cp_state->commandBuffers while we iterate.
    const std::vector<VkCommandBuffer> cb_handles(cp_state->commandBuffers.begin(), cp_state->commandBuffers.end());
    FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_handles.size()), cb_handles.data());
    cp_state->destroyed = true;
    commandPoolMap.erase(commandPool);
}
2792
2793void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2794 VkCommandPoolResetFlags flags, VkResult result) {
2795 if (VK_SUCCESS != result) return;
2796 // Reset all of the CBs allocated from this pool
2797 auto command_pool_state = GetCommandPoolState(commandPool);
2798 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2799 ResetCommandBufferState(cmdBuffer);
2800 }
2801}
2802
void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                       VkResult result) {
    for (uint32_t i = 0; i < fenceCount; ++i) {
        auto pFence = GetFenceState(pFences[i]);
        if (!pFence) continue;
        if (pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_UNSIGNALED;
        } else if (pFence->scope == kSyncScopeExternalTemporary) {
            // Resetting a temporarily-imported fence restores its internal scope.
            pFence->scope = kSyncScopeInternal;
        }
    }
}
2816
// For given cb_nodes, invalidate them and track object causing invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
// can also unlink objects from command buffers.
void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        // A CB still recording becomes invalid-but-incomplete; a finished one becomes invalid-complete.
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // if secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            // The map's value is this object's index into the CB's object_bindings; null it out in place.
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        cb_nodes.clear();
    }
}
2846
void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
                                                            const VulkanTypedHandle &obj) {
    // Set-based variant of InvalidateCommandBuffers: invalidate each CB and record the cause.
    for (auto cb_node : cb_nodes) {
        switch (cb_node->state) {
            case CB_RECORDING:
                cb_node->state = CB_INVALID_INCOMPLETE;
                break;
            case CB_RECORDED:
                cb_node->state = CB_INVALID_COMPLETE;
                break;
            default:
                break;
        }
        cb_node->broken_bindings.push_back(obj);

        // Secondary CBs propagate the invalidation up to the primaries that execute them.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
    }
}
2863
2864void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2865 const VkAllocationCallbacks *pAllocator) {
2866 if (!framebuffer) return;
2867 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2868 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2869 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002870 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002871 frameBufferMap.erase(framebuffer);
2872}
2873
2874void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2875 const VkAllocationCallbacks *pAllocator) {
2876 if (!renderPass) return;
2877 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2878 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2879 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002880 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002881 renderPassMap.erase(renderPass);
2882}
2883
2884void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2885 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2886 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002887 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002888 fence_state->fence = *pFence;
2889 fence_state->createInfo = *pCreateInfo;
2890 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2891 fenceMap[*pFence] = std::move(fence_state);
2892}
2893
2894bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2895 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2896 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002897 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002898 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2899 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2900 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2901 cgpl_state->pipe_state.reserve(count);
2902 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002903 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002904 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002905 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002906 }
2907 return false;
2908}
2909
2910void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2911 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2912 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2913 VkResult result, void *cgpl_state_data) {
2914 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2915 // This API may create pipelines regardless of the return value
2916 for (uint32_t i = 0; i < count; i++) {
2917 if (pPipelines[i] != VK_NULL_HANDLE) {
2918 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2919 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2920 }
2921 }
2922 cgpl_state->pipe_state.clear();
2923}
2924
2925bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2926 const VkComputePipelineCreateInfo *pCreateInfos,
2927 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002928 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002929 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2930 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2931 ccpl_state->pipe_state.reserve(count);
2932 for (uint32_t i = 0; i < count; i++) {
2933 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002934 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002935 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002936 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002937 }
2938 return false;
2939}
2940
2941void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2942 const VkComputePipelineCreateInfo *pCreateInfos,
2943 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2944 VkResult result, void *ccpl_state_data) {
2945 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2946
2947 // This API may create pipelines regardless of the return value
2948 for (uint32_t i = 0; i < count; i++) {
2949 if (pPipelines[i] != VK_NULL_HANDLE) {
2950 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2951 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2952 }
2953 }
2954 ccpl_state->pipe_state.clear();
2955}
2956
2957bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2958 uint32_t count,
2959 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2960 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002961 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002962 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2963 crtpl_state->pipe_state.reserve(count);
2964 for (uint32_t i = 0; i < count; i++) {
2965 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002966 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002967 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002968 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002969 }
2970 return false;
2971}
2972
2973void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2974 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2975 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2976 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2977 // This API may create pipelines regardless of the return value
2978 for (uint32_t i = 0; i < count; i++) {
2979 if (pPipelines[i] != VK_NULL_HANDLE) {
2980 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2981 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2982 }
2983 }
2984 crtpl_state->pipe_state.clear();
2985}
2986
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002987bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache,
2988 uint32_t count,
2989 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2990 const VkAllocationCallbacks *pAllocator,
2991 VkPipeline *pPipelines, void *crtpl_state_data) const {
2992 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2993 crtpl_state->pipe_state.reserve(count);
2994 for (uint32_t i = 0; i < count; i++) {
2995 // Create and initialize internal tracking data structure
2996 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
2997 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
2998 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
2999 }
3000 return false;
3001}
3002
3003void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
3004 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3005 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
3006 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3007 // This API may create pipelines regardless of the return value
3008 for (uint32_t i = 0; i < count; i++) {
3009 if (pPipelines[i] != VK_NULL_HANDLE) {
3010 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3011 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3012 }
3013 }
3014 crtpl_state->pipe_state.clear();
3015}
3016
locke-lunargd556cc32019-09-17 01:21:23 -06003017void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
3018 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
3019 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003020 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Tony-LunarG7337b312020-04-15 16:40:25 -06003021 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT || pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT)
3022 custom_border_color_sampler_count++;
locke-lunargd556cc32019-09-17 01:21:23 -06003023}
3024
3025void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
3026 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
3027 const VkAllocationCallbacks *pAllocator,
3028 VkDescriptorSetLayout *pSetLayout, VkResult result) {
3029 if (VK_SUCCESS != result) return;
3030 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
3031}
3032
3033// For repeatable sorting, not very useful for "memory in range" search
3034struct PushConstantRangeCompare {
3035 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
3036 if (lhs->offset == rhs->offset) {
3037 if (lhs->size == rhs->size) {
3038 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
3039 return lhs->stageFlags < rhs->stageFlags;
3040 }
3041 // If the offsets are the same then sorting by the end of range is useful for validation
3042 return lhs->size < rhs->size;
3043 }
3044 return lhs->offset < rhs->offset;
3045 }
3046};
3047
// Interning dictionary: maps each distinct, sorted set of push constant ranges
// to a canonical id (see GetCanonicalId below).
static PushConstantRangesDict push_constant_ranges_dict;
3049
3050PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
3051 if (!info->pPushConstantRanges) {
3052 // Hand back the empty entry (creating as needed)...
3053 return push_constant_ranges_dict.look_up(PushConstantRanges());
3054 }
3055
3056 // Sort the input ranges to ensure equivalent ranges map to the same id
3057 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
3058 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
3059 sorted.insert(info->pPushConstantRanges + i);
3060 }
3061
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07003062 PushConstantRanges ranges;
3063 ranges.reserve(sorted.size());
locke-lunargd556cc32019-09-17 01:21:23 -06003064 for (const auto range : sorted) {
3065 ranges.emplace_back(*range);
3066 }
3067 return push_constant_ranges_dict.look_up(std::move(ranges));
3068}
3069
// Dictionary of canonical form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;

// Intern a (set index, push-constant-ranges id, set-layouts id) triple as a
// canonical "compatible for set N" record; equal definitions share one id.
static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}
3080
// Begin tracking a pipeline layout: resolve its descriptor set layouts and
// precompute the canonical ids used for pipeline-layout compatibility checks.
void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkPipelineLayout *pPipelineLayout, VkResult result) {
    if (VK_SUCCESS != result) return;

    auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
    pipeline_layout_state->layout = *pPipelineLayout;
    pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
    PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
    // Share each referenced descriptor set layout and collect its canonical layout id.
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
        set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
    }

    // Get canonical form IDs for the "compatible for set" contents
    pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
    auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
    pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);

    // Create table of "compatible for set N" canonical forms for trivial accept validation
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->compat_for_set.emplace_back(
            GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
    }
    pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
}
3107
3108void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
3109 const VkAllocationCallbacks *pAllocator,
3110 VkDescriptorPool *pDescriptorPool, VkResult result) {
3111 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003112 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003113}
3114
3115void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3116 VkDescriptorPoolResetFlags flags, VkResult result) {
3117 if (VK_SUCCESS != result) return;
3118 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
3119 // TODO: validate flags
3120 // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
3121 for (auto ds : pPool->sets) {
3122 FreeDescriptorSet(ds);
3123 }
3124 pPool->sets.clear();
3125 // Reset available count for each type and available sets for this pool
3126 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
3127 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
3128 }
3129 pPool->availableSets = pPool->maxSets;
3130}
3131
3132bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
3133 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003134 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003135 // Always update common data
3136 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3137 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3138 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
3139
3140 return false;
3141}
3142
3143// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
3144void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3145 VkDescriptorSet *pDescriptorSets, VkResult result,
3146 void *ads_state_data) {
3147 if (VK_SUCCESS != result) return;
3148 // All the updates are contained in a single cvdescriptorset function
3149 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3150 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3151 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
3152}
3153
// Return freed descriptor sets' capacity to their pool and drop their tracking state.
void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
                                                             const VkDescriptorSet *pDescriptorSets) {
    DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
    // Update available descriptor sets in pool
    pool_state->availableSets += count;

    // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
    for (uint32_t i = 0; i < count; ++i) {
        if (pDescriptorSets[i] != VK_NULL_HANDLE) {
            // NOTE(review): setMap operator[] default-inserts a null entry for an
            // unknown handle — assumes the handle was recorded at allocate time.
            auto descriptor_set = setMap[pDescriptorSets[i]].get();
            uint32_t type_index = 0, descriptor_count = 0;
            // Credit each binding's descriptors back to the pool's per-type budget.
            for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
                type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
                descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
                pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
            }
            FreeDescriptorSet(descriptor_set);
            pool_state->sets.erase(descriptor_set);
        }
    }
}
3175
// Record state changes for vkUpdateDescriptorSets; all bookkeeping for both the
// writes and the copies is delegated to the cvdescriptorset helper.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}
3183
3184void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
3185 VkCommandBuffer *pCommandBuffer, VkResult result) {
3186 if (VK_SUCCESS != result) return;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003187 auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
locke-lunargd556cc32019-09-17 01:21:23 -06003188 if (pPool) {
3189 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
3190 // Add command buffer to its commandPool map
3191 pPool->commandBuffers.insert(pCommandBuffer[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003192 auto pCB = std::make_shared<CMD_BUFFER_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003193 pCB->createInfo = *pCreateInfo;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003194 pCB->command_pool = pPool;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07003195 pCB->unprotected = pPool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06003196 // Add command buffer to map
3197 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
3198 ResetCommandBufferState(pCommandBuffer[i]);
3199 }
3200 }
3201}
3202
3203// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
3204void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003205 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003206 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07003207 // If imageless fb, skip fb binding
3208 if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003209 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
3210 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003211 auto view_state = GetAttachmentImageViewState(cb_state, fb_state, attachment);
locke-lunargd556cc32019-09-17 01:21:23 -06003212 if (view_state) {
3213 AddCommandBufferBindingImageView(cb_state, view_state);
3214 }
3215 }
3216}
3217
// Track vkBeginCommandBuffer: wire up secondary-buffer inheritance, perform the
// implicit reset when re-beginning, and snapshot begin-time device/lock state.
void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary Command Buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (pInfo) {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, framebuffer);
                }
            }
        }
    }
    // Re-beginning a recorded (or invalidated-after-recorded) buffer is an implicit reset.
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo) {
        // Deep-copy the inheritance info so the stored beginInfo doesn't dangle
        // once the application's pointer goes out of scope.
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command-buffer and inheriting. Update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
            if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
                cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
                if (cb_state->activeFramebuffer) cb_state->framebuffers.insert(cb_state->activeFramebuffer);
            }
        }
    }

    // Device groups: record which physical devices this recording initially targets.
    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        // Default mask: every physical device in the group.
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    // Snapshot whether the performance-query lock was held when recording began.
    cb_state->performance_lock_acquired = performance_lock_acquired;
}
3266
3267void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
3268 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3269 if (!cb_state) return;
3270 // Cached validation is specific to a specific recording of a specific command buffer.
3271 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
3272 descriptor_set->ClearCachedValidation(cb_state);
3273 }
3274 cb_state->validated_descriptor_sets.clear();
3275 if (VK_SUCCESS == result) {
3276 cb_state->state = CB_RECORDED;
3277 }
3278}
3279
3280void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
3281 VkResult result) {
3282 if (VK_SUCCESS == result) {
3283 ResetCommandBufferState(commandBuffer);
3284 }
3285}
3286
// Compute the set of pipeline state that is STATIC (baked into the pipeline):
// start from "everything static" and clear the bit for each state the pipeline
// declares dynamic. Binding the pipeline marks these bits as already set.
CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
    // initially assume everything is static state
    CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;

    // A null dynamic-state struct means nothing is dynamic: all bits stay set.
    if (ds) {
        for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
            switch (ds->pDynamicStates[i]) {
                case VK_DYNAMIC_STATE_LINE_WIDTH:
                    flags &= ~CBSTATUS_LINE_WIDTH_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BIAS:
                    flags &= ~CBSTATUS_DEPTH_BIAS_SET;
                    break;
                case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
                    flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
                    flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
                    flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
                    flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
                    flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
                    break;
                case VK_DYNAMIC_STATE_SCISSOR:
                    flags &= ~CBSTATUS_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT:
                    flags &= ~CBSTATUS_VIEWPORT_SET;
                    break;
                case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
                    flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
                    flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
                    break;
                case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
                    flags &= ~CBSTATUS_LINE_STIPPLE_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
                    flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
                    break;
                // VK_EXT_extended_dynamic_state states below.
                case VK_DYNAMIC_STATE_CULL_MODE_EXT:
                    flags &= ~CBSTATUS_CULL_MODE_SET;
                    break;
                case VK_DYNAMIC_STATE_FRONT_FACE_EXT:
                    flags &= ~CBSTATUS_FRONT_FACE_SET;
                    break;
                case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT:
                    flags &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
                    flags &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
                    break;
                case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT:
                    flags &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
                    break;
                case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT:
                    flags &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT:
                    flags &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT:
                    flags &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT:
                    flags &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT:
                    flags &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT:
                    flags &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_OP_EXT:
                    flags &= ~CBSTATUS_STENCIL_OP_SET;
                    break;
                // Dynamic states without a tracked status bit are ignored here.
                default:
                    break;
            }
        }
    }

    return flags;
}
3377
3378// Validation cache:
3379// CV is the bottommost implementor of this extension. Don't pass calls down.
3380// utility function to set collective state for pipeline
3381void SetPipelineState(PIPELINE_STATE *pPipe) {
3382 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
3383 if (pPipe->graphicsPipelineCI.pColorBlendState) {
3384 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
3385 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
3386 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3387 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3388 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3389 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3390 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3391 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3392 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3393 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
3394 pPipe->blendConstantsEnabled = true;
3395 }
3396 }
3397 }
3398 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07003399 // Check if sample location is enabled
3400 if (pPipe->graphicsPipelineCI.pMultisampleState) {
3401 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
3402 lvl_find_in_chain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
3403 if (sample_location_state != nullptr) {
3404 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
3405 }
3406 }
locke-lunargd556cc32019-09-17 01:21:23 -06003407}
3408
// Track vkCmdBindPipeline: refresh the command buffer's static/dynamic status
// bits, record the binding, and register the reverse link for invalidation.
void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
        // Drop status bits the previous pipeline supplied statically, then mark
        // everything this pipeline bakes in (non-dynamic) as already set.
        cb_state->status &= ~cb_state->static_status;
        cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
        cb_state->status |= cb_state->static_status;
    }
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
    cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
    SetPipelineState(pipe_state);
    AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
}
3425
3426void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3427 uint32_t viewportCount, const VkViewport *pViewports) {
3428 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3429 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
3430 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003431 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003432}
3433
3434void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3435 uint32_t exclusiveScissorCount,
3436 const VkRect2D *pExclusiveScissors) {
3437 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3438 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3439 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3440 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003441 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003442}
3443
3444void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3445 VkImageLayout imageLayout) {
3446 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3447
3448 if (imageView != VK_NULL_HANDLE) {
3449 auto view_state = GetImageViewState(imageView);
3450 AddCommandBufferBindingImageView(cb_state, view_state);
3451 }
3452}
3453
3454void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3455 uint32_t viewportCount,
3456 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3457 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3458 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3459 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3460 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003461 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003462}
3463
// Begin tracking an NV acceleration structure and eagerly cache its three
// memory-requirement queries (object, build scratch, update scratch).
void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
                                                                         const VkAccelerationStructureCreateInfoNV *pCreateInfo,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkAccelerationStructureNV *pAccelerationStructure,
                                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    // 1) Backing-memory requirements of the acceleration structure object itself.
    VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);

    // 2) Scratch-memory requirements for the initial build.
    VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
                                                         &as_state->build_scratch_memory_requirements);

    // 3) Scratch-memory requirements for subsequent updates.
    VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
                                                         &as_state->update_scratch_memory_requirements);
    // Remember the creation-time allocator for later bookkeeping.
    as_state->allocator = pAllocator;
    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}
3495
// Record successful vkCreateAccelerationStructureKHR: mirror of the NV path, but
// each memory-requirements query additionally specifies a device buildType.
void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
                                                                          const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
                                                                          const VkAllocationCallbacks *pAllocator,
                                                                          VkAccelerationStructureKHR *pAccelerationStructure,
                                                                          VkResult result) {
    // Only track objects the driver actually created.
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoKHR as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR;
    as_memory_requirements_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &as_memory_requirements_info, &as_state->memory_requirements);

    // Scratch-memory requirements for the initial build ...
    VkAccelerationStructureMemoryRequirementsInfoKHR scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR;
    scratch_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &scratch_memory_req_info,
                                                          &as_state->build_scratch_memory_requirements);

    // ... and for subsequent update (refit) builds.
    VkAccelerationStructureMemoryRequirementsInfoKHR update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR;
    update_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &update_memory_req_info,
                                                          &as_state->update_scratch_memory_requirements);
    // Remember the creation-time allocation callbacks.
    as_state->allocator = pAllocator;
    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}
3530
locke-lunargd556cc32019-09-17 01:21:23 -06003531void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
3532 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
3533 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
3534 if (as_state != nullptr) {
3535 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3536 as_state->memory_requirements = *pMemoryRequirements;
3537 as_state->memory_requirements_checked = true;
3538 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3539 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3540 as_state->build_scratch_memory_requirements_checked = true;
3541 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3542 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3543 as_state->update_scratch_memory_requirements_checked = true;
3544 }
3545 }
3546}
3547
// Shared post-record logic for vkBindAccelerationStructureMemoryNV/KHR.
// Tracks the bound memory range, the memory<->object binding, and (NV only, with
// GPU-assisted validation on) the opaque handle used when validating TLAS builds.
void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryCommon(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result,
    bool isNV) {
    if (VK_SUCCESS != result) return;  // Nothing was bound on failure
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        const VkBindAccelerationStructureMemoryInfoKHR &info = pBindInfos[i];

        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
        if (as_state) {
            // Track bound memory range information
            auto mem_info = GetDevMemState(info.memory);
            if (mem_info) {
                InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
            }
            // Track objects tied to memory
            SetMemBinding(info.memory, as_state, info.memoryOffset,
                          VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR));

            // GPU validation of top level acceleration structure building needs acceleration structure handles.
            // XXX TODO: Query device address for KHR extension
            if (enabled[gpu_validation] && isNV) {
                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
            }
        }
    }
}
3574
Jeff Bolz95176d02020-04-01 00:36:16 -05003575void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3576 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
3577 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, true);
3578}
3579
3580void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryKHR(
3581 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result) {
3582 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, false);
3583}
3584
// Record vkCmdBuildAccelerationStructureNV: mark dst as built with pInfo and bind
// the src/dst structures to this command buffer so destruction invalidates it.
void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
    VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
    VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state == nullptr) {
        return;
    }

    ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
    ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
    if (dst_as_state != nullptr) {
        dst_as_state->built = true;
        // Snapshot the build parameters for later validation against this structure.
        dst_as_state->build_info.initialize(pInfo);
        AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
    }
    // src may be VK_NULL_HANDLE (non-update builds); only track it when present.
    if (src_as_state != nullptr) {
        AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
    }
    cb_state->hasBuildAccelerationStructureCmd = true;
}
3605
3606void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3607 VkAccelerationStructureNV dst,
3608 VkAccelerationStructureNV src,
3609 VkCopyAccelerationStructureModeNV mode) {
3610 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3611 if (cb_state) {
3612 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3613 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3614 if (dst_as_state != nullptr && src_as_state != nullptr) {
3615 dst_as_state->built = true;
3616 dst_as_state->build_info = src_as_state->build_info;
3617 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3618 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3619 }
3620 }
3621}
3622
// Tear down tracking for an acceleration structure (KHR entry point; the NV entry
// point forwards here). Invalidates command buffers that recorded the object and
// releases its memory-range and memory-object bindings.
void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
                                                                          VkAccelerationStructureKHR accelerationStructure,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!accelerationStructure) return;  // Destroying VK_NULL_HANDLE is a no-op
    auto *as_state = GetAccelerationStructureState(accelerationStructure);
    if (as_state) {
        const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
        // Command buffers referencing this object must be re-recorded before reuse.
        InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
        for (auto mem_binding : as_state->GetBoundMemory()) {
            RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_binding);
        }
        ClearMemoryObjectBindings(obj_struct);
        // Flag for holders of outstanding shared_ptrs to this state object.
        as_state->destroyed = true;
        accelerationStructureMap.erase(accelerationStructure);
    }
}
3639
Jeff Bolz95176d02020-04-01 00:36:16 -05003640void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3641 VkAccelerationStructureNV accelerationStructure,
3642 const VkAllocationCallbacks *pAllocator) {
3643 PreCallRecordDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator);
3644}
3645
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003646void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3647 uint32_t viewportCount,
3648 const VkViewportWScalingNV *pViewportWScalings) {
3649 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3650 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003651 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003652}
3653
locke-lunargd556cc32019-09-17 01:21:23 -06003654void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3655 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3656 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003657 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003658}
3659
3660void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3661 uint16_t lineStipplePattern) {
3662 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3663 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003664 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003665}
3666
3667void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3668 float depthBiasClamp, float depthBiasSlopeFactor) {
3669 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3670 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003671 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003672}
3673
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003674void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3675 const VkRect2D *pScissors) {
3676 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3677 cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
3678 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003679 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003680}
3681
locke-lunargd556cc32019-09-17 01:21:23 -06003682void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3683 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3684 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003685 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003686}
3687
3688void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3689 float maxDepthBounds) {
3690 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3691 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003692 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003693}
3694
3695void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3696 uint32_t compareMask) {
3697 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3698 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003699 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003700}
3701
3702void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3703 uint32_t writeMask) {
3704 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3705 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003706 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003707}
3708
3709void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3710 uint32_t reference) {
3711 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3712 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003713 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003714}
3715
// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
// Sets [first_set, first_set + set_count) become bound; any previously bound set whose
// compat id no longer matches the new layout is treated as "disturbed" and cleared.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else
    // Releases last_bound's push descriptor set if ds is it; returns true when released.
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                // At most one push descriptor set can be bound, so stop at the first hit.
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                // Consume this set's slice of the flat dynamic-offset array.
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
3809
// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                                                                VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                uint32_t firstSet, uint32_t setCount,
                                                                const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                                const uint32_t *pDynamicOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pipeline_layout = GetPipelineLayout(layout);

    // Resize binding arrays
    uint32_t last_set_index = firstSet + setCount - 1;
    if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
        cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
    }

    // push_descriptor_set == nullptr signals the CmdBindDescriptorSets flavor.
    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
                                  dynamicOffsetCount, pDynamicOffsets);
    cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
    // An incompatible layout invalidates previously pushed constants.
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
}
3830
// Record state for vkCmdPushDescriptorSetKHR: materialize (or reuse) the command
// buffer's push descriptor set for this bind point, then apply the write updates.
void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates: target set must exist in the layout and be a push descriptor layout.
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_desriptor_set clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        // Ownership of the raw DescriptorSet transfers to last_bound here.
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    // set_count == 1, pDescriptorSets == nullptr => push-descriptor flavor of the update.
    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}
3856
3857void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3858 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3859 uint32_t set, uint32_t descriptorWriteCount,
3860 const VkWriteDescriptorSet *pDescriptorWrites) {
3861 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3862 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3863}
3864
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003865void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3866 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3867 const void *pValues) {
3868 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3869 if (cb_state != nullptr) {
3870 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3871
3872 auto &push_constant_data = cb_state->push_constant_data;
3873 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3874 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
3875 }
3876}
3877
locke-lunargd556cc32019-09-17 01:21:23 -06003878void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3879 VkIndexType indexType) {
3880 auto buffer_state = GetBufferState(buffer);
3881 auto cb_state = GetCBState(commandBuffer);
3882
3883 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
Piers Daniell39842ee2020-07-10 16:42:33 -06003884 cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
locke-lunargd556cc32019-09-17 01:21:23 -06003885 cb_state->index_buffer_binding.buffer = buffer;
3886 cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
3887 cb_state->index_buffer_binding.offset = offset;
3888 cb_state->index_buffer_binding.index_type = indexType;
3889 // Add binding for this index buffer to this commandbuffer
3890 AddCommandBufferBindingBuffer(cb_state, buffer_state);
3891}
3892
3893void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3894 uint32_t bindingCount, const VkBuffer *pBuffers,
3895 const VkDeviceSize *pOffsets) {
3896 auto cb_state = GetCBState(commandBuffer);
3897
3898 uint32_t end = firstBinding + bindingCount;
3899 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3900 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3901 }
3902
3903 for (uint32_t i = 0; i < bindingCount; ++i) {
3904 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
3905 vertex_buffer_binding.buffer = pBuffers[i];
3906 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06003907 vertex_buffer_binding.size = VK_WHOLE_SIZE;
3908 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06003909 // Add binding for this vertex buffer to this commandbuffer
Jeff Bolz165818a2020-05-08 11:19:03 -05003910 if (pBuffers[i]) {
3911 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
3912 }
locke-lunargd556cc32019-09-17 01:21:23 -06003913 }
3914}
3915
3916void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3917 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
3918 auto cb_state = GetCBState(commandBuffer);
3919 auto dst_buffer_state = GetBufferState(dstBuffer);
3920
3921 // Update bindings between buffer and cmd buffer
3922 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
3923}
3924
Jeff Bolz310775c2019-10-09 00:46:33 -05003925bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
3926 EventToStageMap *localEventToStageMap) {
3927 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06003928 return false;
3929}
3930
// Record vkCmdSetEvent: bind the event to the command buffer and queue a deferred
// update that applies the signaled stage mask when the buffer is executed.
void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    // Track set-before-wait ordering within this command buffer.
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    // Deferred: the event->stage-mask map is built per execution of the buffer.
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}
3947
// Record vkCmdResetEvent: same bookkeeping as CmdSetEvent, but the deferred
// update clears the event's stage mask to 0 (unsignaled).
void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    // Track set/reset-before-wait ordering within this command buffer.
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }

    cb_state->eventUpdates.emplace_back(
        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
        });
}
3965
3966void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
3967 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
3968 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3969 uint32_t bufferMemoryBarrierCount,
3970 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3971 uint32_t imageMemoryBarrierCount,
3972 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3973 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3974 for (uint32_t i = 0; i < eventCount; ++i) {
3975 auto event_state = GetEventState(pEvents[i]);
3976 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003977 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
3978 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003979 }
3980 cb_state->waitedEvents.insert(pEvents[i]);
3981 cb_state->events.push_back(pEvents[i]);
3982 }
3983}
3984
Jeff Bolz310775c2019-10-09 00:46:33 -05003985bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
3986 (*localQueryToStateMap)[object] = value;
3987 return false;
3988}
3989
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003990bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
3991 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05003992 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003993 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05003994 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06003995 }
3996 return false;
3997}
3998
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003999QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
4000 uint32_t perfPass) const {
4001 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06004002
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004003 auto iter = localQueryToStateMap->find(query);
4004 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05004005
Jeff Bolz310775c2019-10-09 00:46:33 -05004006 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06004007}
4008
// Track the start of a query on a command buffer: mark it active/started, queue a deferred
// update that marks it RUNNING at submit time (when the perf pass is known), and bind the
// query pool to the command buffer.
void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled[query_validation]) return;
    cb_state->activeQueries.insert(query_obj);
    cb_state->startedQueries.insert(query_obj);
    // Deferred: the per-pass query state can only be recorded at queue-submit time, so the
    // update is captured by value and replayed later against the submit-local state map.
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
        return false;
    });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}
4023
4024void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
4025 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004026 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004027 QueryObject query = {queryPool, slot};
4028 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4029 RecordCmdBeginQuery(cb_state, query);
4030}
4031
// Track the end of a query on a command buffer: remove it from the active set, queue a
// deferred update that marks it ENDED at submit time, and bind the query pool to the
// command buffer.
void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled[query_validation]) return;
    cb_state->activeQueries.erase(query_obj);
    // Deferred: replayed at queue-submit time once the performance pass index is known.
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}
4044
4045void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004046 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004047 QueryObject query_obj = {queryPool, slot};
4048 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4049 RecordCmdEndQuery(cb_state, query_obj);
4050}
4051
// State-tracking hook for vkCmdResetQueryPool: remember which queries were reset in this
// command buffer, queue a deferred update that marks the whole range RESET at submit time,
// and bind the query pool to the command buffer.
void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled[query_validation]) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
        QueryObject query = {queryPool, slot};
        cb_state->resetQueries.insert(query);
    }

    // Deferred: captures the range by value; replayed per performance pass at submit time.
    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, VkQueryPool &firstPerfQueryPool,
                                                                            uint32_t perfQueryPass,
                                                                            QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}
4072
4073void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4074 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
4075 VkDeviceSize dstOffset, VkDeviceSize stride,
4076 VkQueryResultFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004077 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004078 auto cb_state = GetCBState(commandBuffer);
4079 auto dst_buff_state = GetBufferState(dstBuffer);
4080 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05004081 auto pool_state = GetQueryPoolState(queryPool);
4082 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004083 cb_state);
4084}
4085
// State-tracking hook for vkCmdWriteTimestamp: bind the query pool to the command buffer and
// queue a deferred update marking the timestamp query ENDED at submit time (a timestamp
// write is treated as an immediately-ended query).
void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    if (disabled[query_validation]) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    // Deferred: replayed at queue-submit time once the performance pass index is known.
    cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
                                                VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
}
4100
// State-tracking hook for vkCreateFramebuffer: on success, shadow the create info (plus a
// shared reference to the render pass state) in a new FRAMEBUFFER_STATE and store it in
// frameBufferMap.
void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
        // NOTE(review): this loop looks up each attachment's view state but never uses it —
        // as written it is a no-op (likely a remnant of removed bookkeeping). Confirm whether
        // per-attachment state should be captured here, or remove the loop.
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            VkImageView view = pCreateInfo->pAttachments[i];
            auto view_state = GetImageViewState(view);
            if (!view_state) {
                continue;
            }
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}
4119
// Build the subpass dependency DAG for a render pass: per-subpass prev/next adjacency,
// self-dependency lists, explicit barriers to/from VK_SUBPASS_EXTERNAL, and the set of
// subpasses that may run asynchronously with respect to each subpass.
// NOTE(review): rp_version is currently unused in this body — confirm whether version-specific
// handling was intended.
void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);
    auto &subpass_dependencies = render_pass->subpass_dependencies;
    subpass_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
        subpass_dependencies[i].pass = i;
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
        const auto srcSubpass = dependency.srcSubpass;
        const auto dstSubpass = dependency.dstSubpass;
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
        if (srcSubpass == VK_SUBPASS_EXTERNAL) {
            assert(dstSubpass != VK_SUBPASS_EXTERNAL);  // this is invalid per VUID-VkSubpassDependency-srcSubpass-00865
            subpass_dependencies[dstSubpass].barrier_from_external = &dependency;
        } else if (dstSubpass == VK_SUBPASS_EXTERNAL) {
            subpass_dependencies[srcSubpass].barrier_to_external = &dependency;
        } else if (dependency.srcSubpass != dependency.dstSubpass) {
            // ignore self dependencies in prev and next
            subpass_dependencies[srcSubpass].next.emplace_back(&dependency, &subpass_dependencies[dstSubpass]);
            subpass_dependencies[dstSubpass].prev.emplace_back(&dependency, &subpass_dependencies[srcSubpass]);
        }
    }

    //
    // Determine "asynchronous" subpasses
    // Synchronization is only interested in asynchronous stages *earlier* than the current one... so we'll only look towards those.
    // NOTE: This is O(N^3), which we could shrink to O(N^2logN) using sets instead of arrays, but given that N is likely to be
    // small and the K for |= from the prev is much less than for set, we'll accept the brute force.
    std::vector<std::vector<bool>> pass_depends(pCreateInfo->subpassCount);
    for (uint32_t i = 1; i < pCreateInfo->subpassCount; ++i) {
        auto &depends = pass_depends[i];
        depends.resize(i);
        auto &subpass_dep = subpass_dependencies[i];
        // Transitive closure: a subpass depends on everything its predecessors depend on.
        for (const auto &prev : subpass_dep.prev) {
            const auto prev_pass = prev.node->pass;
            const auto &prev_depends = pass_depends[prev_pass];
            for (uint32_t j = 0; j < prev_pass; j++) {
                depends[j] = depends[j] | prev_depends[j];
            }
            depends[prev_pass] = true;
        }
        // Every earlier pass with no dependency path to this one may run asynchronously.
        for (uint32_t pass = 0; pass < subpass_dep.pass; pass++) {
            if (!depends[pass]) {
                subpass_dep.async.push_back(pass);
            }
        }
    }
}
4183
John Zulauf4aff5d92020-02-21 08:29:35 -07004184static VkSubpassDependency2 ImplicitDependencyFromExternal(uint32_t subpass) {
4185 VkSubpassDependency2 from_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4186 nullptr,
4187 VK_SUBPASS_EXTERNAL,
4188 subpass,
4189 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
4190 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4191 0,
4192 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4193 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4194 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4195 0,
4196 0};
4197 return from_external;
4198}
4199
4200static VkSubpassDependency2 ImplicitDependencyToExternal(uint32_t subpass) {
4201 VkSubpassDependency2 to_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4202 nullptr,
4203 subpass,
4204 VK_SUBPASS_EXTERNAL,
4205 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4206 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
4207 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4208 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4209 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4210 0,
4211 0,
4212 0};
4213 return to_external;
4214}
4215
// Populate a freshly created RENDER_PASS_STATE: record the handle, build the dependency DAG,
// compute per-attachment first/last use, per-subpass layout transitions, implicit external
// dependencies, and finally move the state into renderPassMap.
// NOTE(review): RecordRenderPassDAG is called with RENDER_PASS_VERSION_1 regardless of the
// rp_version parameter — the DAG body does not currently branch on version, but confirm this
// was intentional.
void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
                                                         VkRenderPass *pRenderPass) {
    render_pass->renderPass = *pRenderPass;
    auto create_info = render_pass->createInfo.ptr();

    RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass->get());

    struct AttachmentTracker {  // This is really only of local interest, but a bit big for a lambda
        RENDER_PASS_STATE *const rp;
        // References alias the RENDER_PASS_STATE members being filled in.
        std::vector<uint32_t> &first;
        std::vector<bool> &first_is_transition;
        std::vector<uint32_t> &last;
        std::vector<std::vector<RENDER_PASS_STATE::AttachmentTransition>> &subpass_transitions;
        std::unordered_map<uint32_t, bool> &first_read;
        const uint32_t attachment_count;
        // Running "current layout" per attachment as subpasses are visited in order.
        std::vector<VkImageLayout> attachment_layout;
        // Layout each attachment holds at the end of each subpass (kInvalidLayout if unused there).
        std::vector<std::vector<VkImageLayout>> subpass_attachment_layout;
        AttachmentTracker(std::shared_ptr<RENDER_PASS_STATE> &render_pass)
            : rp(render_pass.get()),
              first(rp->attachment_first_subpass),
              first_is_transition(rp->attachment_first_is_transition),
              last(rp->attachment_last_subpass),
              subpass_transitions(rp->subpass_transitions),
              first_read(rp->attachment_first_read),
              attachment_count(rp->createInfo.attachmentCount),
              attachment_layout(),
              subpass_attachment_layout() {
            first.resize(attachment_count, VK_SUBPASS_EXTERNAL);
            first_is_transition.resize(attachment_count, false);
            last.resize(attachment_count, VK_SUBPASS_EXTERNAL);
            subpass_transitions.resize(rp->createInfo.subpassCount + 1);  // Add an extra for EndRenderPass
            attachment_layout.reserve(attachment_count);
            subpass_attachment_layout.resize(rp->createInfo.subpassCount);
            for (auto &subpass_layouts : subpass_attachment_layout) {
                subpass_layouts.resize(attachment_count, kInvalidLayout);
            }

            // Each attachment starts in its declared initial layout.
            for (uint32_t j = 0; j < attachment_count; j++) {
                attachment_layout.push_back(rp->createInfo.pAttachments[j].initialLayout);
            }
        }

        // Record the use of 'count' attachment references by 'subpass', updating first/last use,
        // first-read classification, and the layout transitions the subpass implies.
        void Update(uint32_t subpass, const VkAttachmentReference2 *attach_ref, uint32_t count, bool is_read) {
            if (nullptr == attach_ref) return;
            for (uint32_t j = 0; j < count; ++j) {
                const auto attachment = attach_ref[j].attachment;
                if (attachment != VK_ATTACHMENT_UNUSED) {
                    const auto layout = attach_ref[j].layout;
                    // Take advantage of the fact that insert won't overwrite, so we'll only write the first time.
                    first_read.insert(std::make_pair(attachment, is_read));
                    if (first[attachment] == VK_SUBPASS_EXTERNAL) {
                        first[attachment] = subpass;
                        const auto initial_layout = rp->createInfo.pAttachments[attachment].initialLayout;
                        // First use that changes layout implies an initial (external) transition.
                        if (initial_layout != layout) {
                            subpass_transitions[subpass].emplace_back(VK_SUBPASS_EXTERNAL, attachment, initial_layout, layout);
                            first_is_transition[attachment] = true;
                        }
                    }
                    last[attachment] = subpass;

                    // Record a transition from every dependent predecessor that left the
                    // attachment in a different layout.
                    for (const auto &prev : rp->subpass_dependencies[subpass].prev) {
                        const auto prev_pass = prev.node->pass;
                        const auto prev_layout = subpass_attachment_layout[prev_pass][attachment];
                        if ((prev_layout != kInvalidLayout) && (prev_layout != layout)) {
                            subpass_transitions[subpass].emplace_back(prev_pass, attachment, prev_layout, layout);
                        }
                    }
                    attachment_layout[attachment] = layout;
                }
            }
        }
        // Record the end-of-render-pass transitions into the extra subpass_transitions slot.
        void FinalTransitions() {
            auto &final_transitions = subpass_transitions[rp->createInfo.subpassCount];

            for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
                const auto final_layout = rp->createInfo.pAttachments[attachment].finalLayout;
                // Add final transitions for attachments that were used and change layout.
                if ((last[attachment] != VK_SUBPASS_EXTERNAL) && final_layout != attachment_layout[attachment]) {
                    final_transitions.emplace_back(last[attachment], attachment, attachment_layout[attachment], final_layout);
                }
            }
        }
    };
    AttachmentTracker attachment_tracker(render_pass);

    for (uint32_t subpass_index = 0; subpass_index < create_info->subpassCount; ++subpass_index) {
        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[subpass_index];
        attachment_tracker.Update(subpass_index, subpass.pColorAttachments, subpass.colorAttachmentCount, false);
        attachment_tracker.Update(subpass_index, subpass.pResolveAttachments, subpass.colorAttachmentCount, false);
        attachment_tracker.Update(subpass_index, subpass.pDepthStencilAttachment, 1, false);
        attachment_tracker.Update(subpass_index, subpass.pInputAttachments, subpass.inputAttachmentCount, true);
    }
    attachment_tracker.FinalTransitions();

    // Add implicit dependencies
    for (uint32_t attachment = 0; attachment < attachment_tracker.attachment_count; attachment++) {
        const auto first_use = attachment_tracker.first[attachment];
        if (first_use != VK_SUBPASS_EXTERNAL) {
            auto &subpass_dep = render_pass->subpass_dependencies[first_use];
            if (!subpass_dep.barrier_from_external) {
                // Add implicit from barrier
                subpass_dep.implicit_barrier_from_external.reset(
                    new VkSubpassDependency2(ImplicitDependencyFromExternal(first_use)));
                subpass_dep.barrier_from_external = subpass_dep.implicit_barrier_from_external.get();
            }
        }

        const auto last_use = attachment_tracker.last[attachment];
        if (last_use != VK_SUBPASS_EXTERNAL) {
            auto &subpass_dep = render_pass->subpass_dependencies[last_use];
            if (!render_pass->subpass_dependencies[last_use].barrier_to_external) {
                // Add implicit to barrier
                subpass_dep.implicit_barrier_to_external.reset(new VkSubpassDependency2(ImplicitDependencyToExternal(last_use)));
                subpass_dep.barrier_to_external = subpass_dep.implicit_barrier_to_external.get();
            }
        }
    }

    // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
    renderPassMap[*pRenderPass] = std::move(render_pass);
}
4338
4339// Style note:
4340// Use of rvalue reference exceeds reccommended usage of rvalue refs in google style guide, but intentionally forces caller to move
4341// or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
4342// construction or assignment.
4343void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
4344 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4345 VkResult result) {
4346 if (VK_SUCCESS != result) return;
4347 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4348 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
4349}
4350
Tony-LunarG977448c2019-12-02 14:52:02 -07004351void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
4352 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4353 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004354 if (VK_SUCCESS != result) return;
4355 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4356 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
4357}
4358
// State-tracking hook for vkCreateRenderPass2KHR: forwards to the shared v2 record path.
void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}
4364
// State-tracking hook for vkCreateRenderPass2 (core promotion): forwards to the shared v2 record path.
void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                             VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}
4370
// Shared record path for vkCmdBeginRenderPass / 2 / 2KHR: capture the active render pass,
// framebuffer, begin info, device-group mask, and any imageless-framebuffer attachments on
// the command buffer state, and cross-bind the render pass and framebuffer to the CB.
void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        // Deep-copy the begin info (safe_* shadows pointed-to data, incl. the pNext chain).
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        if (framebuffer) cb_state->framebuffers.insert(framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer.get());
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(
            render_pass_state->cb_bindings,
            VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state.get()), cb_state);

        // Device-group rendering: take the chained mask if present, else the CB's initial mask.
        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }

        // Imageless framebuffer: the attachments are supplied at begin time via the pNext chain.
        cb_state->imagelessFramebufferAttachments.clear();
        auto attachment_info_struct = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
        if (attachment_info_struct) {
            for (uint32_t i = 0; i < attachment_info_struct->attachmentCount; i++) {
                IMAGE_VIEW_STATE *img_view_state = GetImageViewState(attachment_info_struct->pAttachments[i]);
                cb_state->imagelessFramebufferAttachments.push_back(img_view_state);
            }
        }
    }
}
4409
// State-tracking hook for vkCmdBeginRenderPass: forwards to the shared begin-render-pass record path.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}
4415
// State-tracking hook for vkCmdBeginRenderPass2KHR: forwards to the shared record path,
// passing the contents from the subpass begin info.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}
4421
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06004422void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4423 uint32_t counterBufferCount,
4424 const VkBuffer *pCounterBuffers,
4425 const VkDeviceSize *pCounterBufferOffsets) {
4426 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4427
4428 cb_state->transform_feedback_active = true;
4429}
4430
4431void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4432 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
4433 const VkDeviceSize *pCounterBufferOffsets) {
4434 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4435
4436 cb_state->transform_feedback_active = false;
4437}
4438
// State-tracking hook for vkCmdBeginRenderPass2 (core promotion): forwards to the shared
// record path, passing the contents from the subpass begin info.
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}
4444
locke-lunargd556cc32019-09-17 01:21:23 -06004445void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4446 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4447 cb_state->activeSubpass++;
4448 cb_state->activeSubpassContents = contents;
4449}
4450
// State-tracking hook for vkCmdNextSubpass: forwards to the shared next-subpass record path.
void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}
4454
// State-tracking hook for vkCmdNextSubpass2KHR: forwards the contents from the begin info.
void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}
4460
// State-tracking hook for vkCmdNextSubpass2 (core promotion): forwards the contents from the begin info.
void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}
4466
locke-lunargd556cc32019-09-17 01:21:23 -06004467void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
4468 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4469 cb_state->activeRenderPass = nullptr;
4470 cb_state->activeSubpass = 0;
4471 cb_state->activeFramebuffer = VK_NULL_HANDLE;
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03004472 cb_state->imagelessFramebufferAttachments.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06004473}
4474
// State-tracking hook for vkCmdEndRenderPass: forwards to the shared end-render-pass record path.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}
4478
// State-tracking hook for vkCmdEndRenderPass2KHR: forwards to the shared end-render-pass record path.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}
4483
// State-tracking hook for vkCmdEndRenderPass2 (core promotion): forwards to the shared record path.
void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                             const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}
// State-tracking hook for vkCmdExecuteCommands: link each secondary command buffer to the
// primary, propagate the secondaries' image layout maps and deferred query/submit work into
// the primary, and clear SIMULTANEOUS_USE on the primary when a secondary lacks it.
void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
                // from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        // Cross-link primary and secondary so invalidation flows both ways.
        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        // Deferred work recorded in the secondary must also run when the primary is submitted.
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}
4530
// State-tracking hook for vkMapMemory: on success, record the mapped range and returned pointer.
void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}
4536
4537void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4538 auto mem_info = GetDevMemState(mem);
4539 if (mem_info) {
4540 mem_info->mapped_range = MemRange();
4541 mem_info->p_driver_data = nullptr;
4542 }
4543}
4544
// Record the state effects of binding memory to an image: rebuild the subresource range
// encoder (legal only once memory is bound, see VUID below), track either the swapchain
// binding or the device-memory range/binding, and register aliasing where applicable.
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // An Android special image cannot get VkSubresourceLayout until the image binds a memory.
        // See: VUID-vkGetImageSubresourceLayout-image-01895
        image_state->fragment_encoder =
            std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            // Swapchain-backed binding: link the image to its swapchain slot instead of device memory.
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        // Swapchain images alias each other; ALIAS_BIT images may alias by app declaration.
        if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
            AddAliasingImage(image_state);
        }
    }
}
4576
4577void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
4578 VkDeviceSize memoryOffset, VkResult result) {
4579 if (VK_SUCCESS != result) return;
4580 VkBindImageMemoryInfo bindInfo = {};
4581 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
4582 bindInfo.image = image;
4583 bindInfo.memory = mem;
4584 bindInfo.memoryOffset = memoryOffset;
4585 UpdateBindImageMemoryState(bindInfo);
4586}
4587
4588void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
4589 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
4590 if (VK_SUCCESS != result) return;
4591 for (uint32_t i = 0; i < bindInfoCount; i++) {
4592 UpdateBindImageMemoryState(pBindInfos[i]);
4593 }
4594}
4595
4596void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
4597 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
4598 if (VK_SUCCESS != result) return;
4599 for (uint32_t i = 0; i < bindInfoCount; i++) {
4600 UpdateBindImageMemoryState(pBindInfos[i]);
4601 }
4602}
4603
4604void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
4605 auto event_state = GetEventState(event);
4606 if (event_state) {
4607 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
4608 }
locke-lunargd556cc32019-09-17 01:21:23 -06004609}
4610
4611void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
4612 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
4613 VkResult result) {
4614 if (VK_SUCCESS != result) return;
4615 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
4616 pImportSemaphoreFdInfo->flags);
4617}
4618
4619void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
4620 VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
4621 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
4622 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
4623 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
4624 semaphore_state->scope = kSyncScopeExternalPermanent;
4625 }
4626}
4627
4628#ifdef VK_USE_PLATFORM_WIN32_KHR
4629void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
4630 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
4631 if (VK_SUCCESS != result) return;
4632 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
4633 pImportSemaphoreWin32HandleInfo->flags);
4634}
4635
4636void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
4637 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4638 HANDLE *pHandle, VkResult result) {
4639 if (VK_SUCCESS != result) return;
4640 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
4641}
4642
4643void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
4644 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
4645 if (VK_SUCCESS != result) return;
4646 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
4647 pImportFenceWin32HandleInfo->flags);
4648}
4649
4650void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
4651 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4652 HANDLE *pHandle, VkResult result) {
4653 if (VK_SUCCESS != result) return;
4654 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
4655}
4656#endif
4657
4658void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
4659 VkResult result) {
4660 if (VK_SUCCESS != result) return;
4661 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
4662}
4663
4664void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
4665 VkFenceImportFlagsKHR flags) {
4666 FENCE_STATE *fence_node = GetFenceState(fence);
4667 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
4668 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
4669 fence_node->scope == kSyncScopeInternal) {
4670 fence_node->scope = kSyncScopeExternalTemporary;
4671 } else {
4672 fence_node->scope = kSyncScopeExternalPermanent;
4673 }
4674 }
4675}
4676
4677void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
4678 VkResult result) {
4679 if (VK_SUCCESS != result) return;
4680 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
4681}
4682
4683void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
4684 FENCE_STATE *fence_state = GetFenceState(fence);
4685 if (fence_state) {
4686 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
4687 // Export with reference transference becomes external
4688 fence_state->scope = kSyncScopeExternalPermanent;
4689 } else if (fence_state->scope == kSyncScopeInternal) {
4690 // Export with copy transference has a side effect of resetting the fence
4691 fence_state->state = FENCE_UNSIGNALED;
4692 }
4693 }
4694}
4695
4696void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
4697 VkResult result) {
4698 if (VK_SUCCESS != result) return;
4699 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
4700}
4701
4702void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
4703 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
4704 if (VK_SUCCESS != result) return;
4705 eventMap[*pEvent].write_in_use = 0;
4706 eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
4707}
4708
4709void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
4710 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
4711 SWAPCHAIN_NODE *old_swapchain_state) {
4712 if (VK_SUCCESS == result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004713 auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004714 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
4715 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
4716 swapchain_state->shared_presentable = true;
4717 }
4718 surface_state->swapchain = swapchain_state.get();
4719 swapchainMap[*pSwapchain] = std::move(swapchain_state);
4720 } else {
4721 surface_state->swapchain = nullptr;
4722 }
4723 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
4724 if (old_swapchain_state) {
4725 old_swapchain_state->retired = true;
4726 }
4727 return;
4728}
4729
4730void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
4731 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
4732 VkResult result) {
4733 auto surface_state = GetSurfaceState(pCreateInfo->surface);
4734 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
4735 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
4736}
4737
// vkDestroySwapchainKHR: tear down tracking for the swapchain, its presentable images, and its
// link to the owning surface.
void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        // Presentable images are owned by the swapchain; drop their bindings and state with it.
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        // Detach from the surface, but only if this swapchain is still the surface's current one.
        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        // Flag before erasing so outstanding shared_ptr holders can see the object is gone.
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}
4757
// vkQueuePresentKHR: retire the wait semaphores and mark each successfully presented image as
// released back to the WSI.
void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            // The wait consumes the signal; the semaphore no longer has a pending signaler.
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
        // confused itself just as much.
        // Per-swapchain results (when provided) take precedence over the aggregate result.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                // Shared presentable images must stay in their presentable layout after first present.
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
    // its semaphore waits) /never/ participate in any completion proof.
}
4789
4790void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
4791 const VkSwapchainCreateInfoKHR *pCreateInfos,
4792 const VkAllocationCallbacks *pAllocator,
4793 VkSwapchainKHR *pSwapchains, VkResult result) {
4794 if (pCreateInfos) {
4795 for (uint32_t i = 0; i < swapchainCount; i++) {
4796 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
4797 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
4798 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
4799 }
4800 }
4801}
4802
// Shared state update for vkAcquireNextImageKHR / vkAcquireNextImage2KHR: the fence and semaphore
// become pending/signaled (ANI signals them without a queue submission), and the acquired image is
// marked as owned by the application.
void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            // Propagate the swapchain's shared-presentable property to the image state.
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}
4832
4833void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4834 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
4835 VkResult result) {
4836 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4837 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
4838}
4839
4840void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
4841 uint32_t *pImageIndex, VkResult result) {
4842 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4843 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
4844 pAcquireInfo->fence, pImageIndex);
4845}
4846
4847void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
4848 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
4849 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
4850 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
4851 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
4852 phys_device_state.phys_device = pPhysicalDevices[i];
4853 // Init actual features for each physical device
4854 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
4855 }
4856 }
4857}
4858
4859// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
4860static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
4861 VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
4862 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
4863
4864 if (!pQueueFamilyProperties) {
4865 if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
4866 pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
4867 } else { // Save queue family properties
4868 pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;
4869
4870 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
4871 for (uint32_t i = 0; i < count; ++i) {
4872 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
4873 }
4874 }
4875}
4876
4877void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
4878 uint32_t *pQueueFamilyPropertyCount,
4879 VkQueueFamilyProperties *pQueueFamilyProperties) {
4880 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4881 assert(physical_device_state);
4882 VkQueueFamilyProperties2KHR *pqfp = nullptr;
4883 std::vector<VkQueueFamilyProperties2KHR> qfp;
4884 qfp.resize(*pQueueFamilyPropertyCount);
4885 if (pQueueFamilyProperties) {
4886 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
4887 qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
4888 qfp[i].pNext = nullptr;
4889 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
4890 }
4891 pqfp = qfp.data();
4892 }
4893 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
4894}
4895
4896void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
4897 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
4898 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4899 assert(physical_device_state);
4900 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4901 pQueueFamilyProperties);
4902}
4903
4904void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
4905 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
4906 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4907 assert(physical_device_state);
4908 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4909 pQueueFamilyProperties);
4910}
4911void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
4912 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004913 if (!surface) return;
4914 auto surface_state = GetSurfaceState(surface);
4915 surface_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004916 surface_map.erase(surface);
4917}
4918
4919void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004920 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06004921}
4922
4923void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
4924 const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
4925 const VkAllocationCallbacks *pAllocator,
4926 VkSurfaceKHR *pSurface, VkResult result) {
4927 if (VK_SUCCESS != result) return;
4928 RecordVulkanSurface(pSurface);
4929}
4930
4931#ifdef VK_USE_PLATFORM_ANDROID_KHR
4932void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
4933 const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
4934 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4935 VkResult result) {
4936 if (VK_SUCCESS != result) return;
4937 RecordVulkanSurface(pSurface);
4938}
4939#endif // VK_USE_PLATFORM_ANDROID_KHR
4940
4941#ifdef VK_USE_PLATFORM_IOS_MVK
4942void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
4943 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4944 VkResult result) {
4945 if (VK_SUCCESS != result) return;
4946 RecordVulkanSurface(pSurface);
4947}
4948#endif // VK_USE_PLATFORM_IOS_MVK
4949
4950#ifdef VK_USE_PLATFORM_MACOS_MVK
4951void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
4952 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
4953 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4954 VkResult result) {
4955 if (VK_SUCCESS != result) return;
4956 RecordVulkanSurface(pSurface);
4957}
4958#endif // VK_USE_PLATFORM_MACOS_MVK
4959
Jeremy Kniagerf33a67c2019-12-09 09:44:39 -07004960#ifdef VK_USE_PLATFORM_METAL_EXT
4961void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
4962 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
4963 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4964 VkResult result) {
4965 if (VK_SUCCESS != result) return;
4966 RecordVulkanSurface(pSurface);
4967}
4968#endif // VK_USE_PLATFORM_METAL_EXT
4969
locke-lunargd556cc32019-09-17 01:21:23 -06004970#ifdef VK_USE_PLATFORM_WAYLAND_KHR
4971void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
4972 const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
4973 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4974 VkResult result) {
4975 if (VK_SUCCESS != result) return;
4976 RecordVulkanSurface(pSurface);
4977}
4978#endif // VK_USE_PLATFORM_WAYLAND_KHR
4979
4980#ifdef VK_USE_PLATFORM_WIN32_KHR
4981void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
4982 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
4983 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4984 VkResult result) {
4985 if (VK_SUCCESS != result) return;
4986 RecordVulkanSurface(pSurface);
4987}
4988#endif // VK_USE_PLATFORM_WIN32_KHR
4989
4990#ifdef VK_USE_PLATFORM_XCB_KHR
4991void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
4992 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4993 VkResult result) {
4994 if (VK_SUCCESS != result) return;
4995 RecordVulkanSurface(pSurface);
4996}
4997#endif // VK_USE_PLATFORM_XCB_KHR
4998
4999#ifdef VK_USE_PLATFORM_XLIB_KHR
5000void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
5001 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5002 VkResult result) {
5003 if (VK_SUCCESS != result) return;
5004 RecordVulkanSurface(pSurface);
5005}
5006#endif // VK_USE_PLATFORM_XLIB_KHR
5007
Niklas Haas8b84af12020-04-19 22:20:11 +02005008void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
5009 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
5010 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5011 VkResult result) {
5012 if (VK_SUCCESS != result) return;
5013 RecordVulkanSurface(pSurface);
5014}
5015
Cort23cf2282019-09-20 18:58:18 +02005016void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005017 VkPhysicalDeviceFeatures *pFeatures) {
5018 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5019 physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
Yilong Li358152a2020-07-08 02:16:45 -07005020 // Reset the features2 safe struct before setting up the features field.
5021 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02005022 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02005023}
5024
5025void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005026 VkPhysicalDeviceFeatures2 *pFeatures) {
5027 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5028 physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
5029 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005030}
5031
5032void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005033 VkPhysicalDeviceFeatures2 *pFeatures) {
5034 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5035 physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
5036 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005037}
5038
locke-lunargd556cc32019-09-17 01:21:23 -06005039void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
5040 VkSurfaceKHR surface,
5041 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
5042 VkResult result) {
5043 if (VK_SUCCESS != result) return;
5044 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5045 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
Camden Stocker61050592019-11-27 12:03:09 -08005046 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005047 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
5048}
5049
5050void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
5051 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5052 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
5053 if (VK_SUCCESS != result) return;
5054 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5055 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
Camden Stocker61050592019-11-27 12:03:09 -08005056 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005057 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
5058}
5059
5060void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
5061 VkSurfaceKHR surface,
5062 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
5063 VkResult result) {
5064 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5065 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
Camden Stocker61050592019-11-27 12:03:09 -08005066 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005067 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
5068 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
5069 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
5070 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
5071 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
5072 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
5073 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
5074 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
5075 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
5076 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
5077}
5078
5079void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
5080 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
5081 VkBool32 *pSupported, VkResult result) {
5082 if (VK_SUCCESS != result) return;
5083 auto surface_state = GetSurfaceState(surface);
5084 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
5085}
5086
// vkGetPhysicalDeviceSurfacePresentModesKHR: track the two-call idiom (count query vs. detail
// query) and cache the returned present modes.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        // Count phase: advance the query state machine and grow (never shrink) the cached list.
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        // Detail phase: save the returned present modes for later swapchain validation.
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}
5110
// vkGetPhysicalDeviceSurfaceFormatsKHR: track the two-call idiom and cache the returned formats.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        // Count phase: advance the query state machine and grow (never shrink) the cached list.
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        // Detail phase: save the returned surface formats for later swapchain validation.
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}
5132
5133void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
5134 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5135 uint32_t *pSurfaceFormatCount,
5136 VkSurfaceFormat2KHR *pSurfaceFormats,
5137 VkResult result) {
5138 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5139
5140 auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
5141 if (*pSurfaceFormatCount) {
5142 if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
5143 physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
5144 }
5145 if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
5146 physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
5147 }
5148 if (pSurfaceFormats) {
5149 if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
5150 physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
5151 }
5152 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
5153 physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
5154 }
5155 }
5156}
5157
// vkCmdBeginDebugUtilsLabelEXT: push the label onto the command buffer's debug-label tracking.
void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}
5162
// vkCmdEndDebugUtilsLabelEXT: pop the most recent label from the command buffer's tracking.
void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}
5166
5167void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
5168 const VkDebugUtilsLabelEXT *pLabelInfo) {
5169 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
5170
5171 // Squirrel away an easily accessible copy.
5172 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5173 cb_state->debug_label = LoggingLabel(pLabelInfo);
5174}
5175
5176void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
5177 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
5178 if (NULL != pPhysicalDeviceGroupProperties) {
5179 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
5180 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
5181 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
5182 auto &phys_device_state = physical_device_map[cur_phys_dev];
5183 phys_device_state.phys_device = cur_phys_dev;
5184 // Init actual features for each physical device
5185 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
5186 }
5187 }
5188 }
5189}
5190
5191void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
5192 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
5193 VkResult result) {
5194 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5195 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5196}
5197
5198void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
5199 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
5200 VkResult result) {
5201 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5202 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5203}
5204
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005205void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
5206 uint32_t queueFamilyIndex,
5207 uint32_t *pCounterCount,
5208 VkPerformanceCounterKHR *pCounters) {
5209 if (NULL == pCounters) return;
5210
5211 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5212 assert(physical_device_state);
5213
5214 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
5215 queueFamilyCounters->counters.resize(*pCounterCount);
5216 for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];
5217
5218 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
5219}
5220
5221void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
5222 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
5223 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
5224 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5225 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
5226}
5227
void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    // Remember that the performance-profiling lock is held; only set on success, never cleared here
    // (release is tracked in PostCallRecordReleaseProfilingLockKHR).
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}
5232
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005233void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
5234 performance_lock_acquired = false;
5235 for (auto &cmd_buffer : commandBufferMap) {
5236 cmd_buffer.second->performance_lock_released = true;
5237 }
5238}
5239
locke-lunargd556cc32019-09-17 01:21:23 -06005240void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
5241 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5242 const VkAllocationCallbacks *pAllocator) {
5243 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005244 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5245 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005246 desc_template_map.erase(descriptorUpdateTemplate);
5247}
5248
5249void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
5250 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5251 const VkAllocationCallbacks *pAllocator) {
5252 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005253 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5254 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005255 desc_template_map.erase(descriptorUpdateTemplate);
5256}
5257
5258void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
5259 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
5260 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005261 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06005262 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
5263}
5264
5265void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
5266 VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
5267 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
5268 if (VK_SUCCESS != result) return;
5269 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5270}
5271
5272void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
5273 VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
5274 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
5275 if (VK_SUCCESS != result) return;
5276 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5277}
5278
5279void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
5280 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5281 const void *pData) {
5282 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
5283 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
5284 assert(0);
5285 } else {
5286 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
5287 // TODO: Record template push descriptor updates
5288 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
5289 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
5290 }
5291 }
5292}
5293
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    // Core entry point; recording logic is shared with the KHR alias.
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5299
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    // KHR extension entry point; recording logic is shared with the core alias.
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5305
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    // Record a templated push-descriptor update: decode the raw template payload into ordinary
    // descriptor writes and route them through the common push-descriptor recording path.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        // Only decode against a set layout that is known and still alive.
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}
5326
5327void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
5328 uint32_t *pPropertyCount, void *pProperties) {
5329 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5330 if (*pPropertyCount) {
5331 if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
5332 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
Camden Stocker61050592019-11-27 12:03:09 -08005333 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005334 }
5335 physical_device_state->display_plane_property_count = *pPropertyCount;
5336 }
5337 if (pProperties) {
5338 if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
5339 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
Camden Stocker61050592019-11-27 12:03:09 -08005340 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005341 }
5342 }
5343}
5344
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    // Record both complete and partial (VK_INCOMPLETE) query results.
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}
5352
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    // The 2KHR variant shares the same state machine as the original query.
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}
5360
5361void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5362 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
5363 QueryObject query_obj = {queryPool, query, index};
5364 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5365 RecordCmdBeginQuery(cb_state, query_obj);
5366}
5367
5368void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5369 uint32_t query, uint32_t index) {
5370 QueryObject query_obj = {queryPool, query, index};
5371 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5372 RecordCmdEndQuery(cb_state, query_obj);
5373}
5374
void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    // Build and register tracking state for a newly created sampler Ycbcr conversion.
    auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();

    // Android AHB bookkeeping runs first; per the comment below it supplies format_features
    // for external (VK_FORMAT_UNDEFINED) formats.
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
    }

    const VkFormat conversion_format = create_info->format;

    if (conversion_format != VK_FORMAT_UNDEFINED) {
        // If format is VK_FORMAT_UNDEFINED, will be set by external AHB features
        ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
    }

    ycbcr_state->chromaFilter = create_info->chromaFilter;
    ycbcr_state->format = conversion_format;
    samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
}
5394
void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    // Core entry point; only track conversions the driver actually created.
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}
5403
void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    // KHR alias; only track conversions the driver actually created.
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}
5412
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005413void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
5414 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
5415 RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
5416 }
5417
5418 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
5419 ycbcr_state->destroyed = true;
5420 samplerYcbcrConversionMap.erase(ycbcr_conversion);
5421}
5422
void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    // Destroying VK_NULL_HANDLE is a no-op per the Vulkan spec.
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}
5428
void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    // KHR alias; destroying VK_NULL_HANDLE is a no-op per the Vulkan spec.
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}
5435
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Record a host-side query pool reset (vkResetQueryPool / EXT alias): mark each affected
    // query slot QUERYSTATE_RESET in queryToStateMap.
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    // Clamp to the pool's actual size so an over-long range can't index past the pool.
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        // Performance queries track one state entry per counter pass as well.
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_obj.perf_pass = passIndex;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}
5459
void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    // EXT alias of vkResetQueryPool; shared recording logic.
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5464
void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    // Core entry point; shared recording logic with the EXT alias.
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5469
locke-lunargd556cc32019-09-17 01:21:23 -06005470void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
5471 const TEMPLATE_STATE *template_state, const void *pData) {
5472 // Translate the templated update into a normal update for validation...
5473 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
5474 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
5475 decoded_update.desc_writes.data(), 0, NULL);
5476}
5477
5478// Update the common AllocateDescriptorSetsData
5479void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005480 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005481 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005482 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005483 if (layout) {
5484 ds_data->layout_nodes[i] = layout;
5485 // Count total descriptors required per type
5486 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5487 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
5488 uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
5489 ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
5490 }
5491 }
5492 // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
5493 }
5494}
5495
5496// Decrement allocated sets from the pool and insert new sets into set_map
5497void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
5498 const VkDescriptorSet *descriptor_sets,
5499 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
5500 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
5501 // Account for sets and individual descriptors allocated from pool
5502 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
5503 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
5504 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
5505 }
5506
5507 const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
5508 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
5509
5510 // Create tracking object for each descriptor set; insert into global map and the pool's set.
5511 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
5512 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
5513
Jeff Bolz41a1ced2019-10-11 11:40:49 -05005514 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07005515 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06005516 pool_state->sets.insert(new_ds.get());
5517 new_ds->in_use.store(0);
5518 setMap[descriptor_sets[i]] = std::move(new_ds);
5519 }
5520}
5521
// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
                                                            VkPipelineBindPoint bind_point) {
    UpdateDrawState(cb_state, cmd_type, bind_point);
    // NOTE(review): hasDispatchCmd is also set for draw commands, since UpdateStateCmdDrawType
    // routes through here — confirm that is intended before treating the flag as dispatch-only.
    cb_state->hasDispatchCmd = true;
}
5528
// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point) {
    // Shares the common draw/dispatch state update, then marks that a draw was recorded.
    UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point);
    cb_state->hasDrawCmd = true;
}
5534
5535void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5536 uint32_t firstVertex, uint32_t firstInstance) {
5537 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005538 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005539}
5540
5541void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5542 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5543 uint32_t firstInstance) {
5544 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005545 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005546}
5547
5548void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5549 uint32_t count, uint32_t stride) {
5550 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5551 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005552 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005553 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5554}
5555
5556void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5557 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5558 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5559 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005560 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005561 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5562}
5563
5564void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5565 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005566 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE);
locke-lunargd556cc32019-09-17 01:21:23 -06005567}
5568
5569void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5570 VkDeviceSize offset) {
5571 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005572 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE);
locke-lunargd556cc32019-09-17 01:21:23 -06005573 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5574 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5575}
5576
Tony-LunarG977448c2019-12-02 14:52:02 -07005577void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5578 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5579 uint32_t stride) {
5580 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5581 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5582 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005583 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS);
Tony-LunarG977448c2019-12-02 14:52:02 -07005584 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5585 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5586}
5587
void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    // KHR extension alias; shares recording logic with the core entry point.
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}
5594
void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    // Core entry point; shares recording logic with the KHR alias.
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}
5600
5601void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5602 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5603 uint32_t maxDrawCount, uint32_t stride) {
locke-lunargd556cc32019-09-17 01:21:23 -06005604 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5605 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5606 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005607 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005608 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5609 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5610}
5611
void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    // KHR extension alias; shares recording logic with the core entry point.
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}
5618
void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                      VkDeviceSize offset, VkBuffer countBuffer,
                                                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                      uint32_t stride) {
    // Core entry point; shares recording logic with the KHR alias.
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}
5625
5626void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5627 uint32_t firstTask) {
5628 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005629 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005630}
5631
5632void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5633 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5634 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005635 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005636 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5637 if (buffer_state) {
5638 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5639 }
5640}
5641
5642void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5643 VkDeviceSize offset, VkBuffer countBuffer,
5644 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5645 uint32_t stride) {
5646 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5647 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5648 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005649 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005650 if (buffer_state) {
5651 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5652 }
5653 if (count_buffer_state) {
5654 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5655 }
5656}
5657
void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    // Create tracking state for a new shader module, parsing its SPIR-V when present.
    if (VK_SUCCESS != result) return;
    // csm_state_data carries chassis-level create_shader_module_api_state, including the module's
    // unique_shader_id — presumably assigned during the pre-call phase; confirm in chassis code.
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
    // Code without the SPIR-V magic number gets an empty (no-parse) state object.
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}
5672
5673void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005674 PIPELINE_STATE::StageState *stage_state) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005675 // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
5676 auto module = GetShaderModuleState(pStage->module);
5677 if (!module->has_valid_spirv) return;
5678
5679 // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
5680 auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
5681 if (entrypoint == module->end()) return;
5682
locke-lunarg654e3692020-06-04 17:19:15 -06005683 stage_state->stage_flag = pStage->stage;
5684
locke-lunargd556cc32019-09-17 01:21:23 -06005685 // Mark accessible ids
5686 stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
5687 ProcessExecutionModes(module, entrypoint, pipeline);
5688
5689 stage_state->descriptor_uses =
Mark Lobodzinskid8d658e2020-01-30 15:05:51 -07005690 CollectInterfaceByDescriptorSlot(module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
locke-lunargd556cc32019-09-17 01:21:23 -06005691 // Capture descriptor uses for the pipeline
5692 for (auto use : stage_state->descriptor_uses) {
5693 // While validating shaders capture which slots are used by the pipeline
John Zulauf649edd52019-10-02 14:39:41 -06005694 const uint32_t slot = use.first.first;
5695 auto &reqs = pipeline->active_slots[slot][use.first.second];
locke-lunargd556cc32019-09-17 01:21:23 -06005696 reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
John Zulauf649edd52019-10-02 14:39:41 -06005697 pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
locke-lunargd556cc32019-09-17 01:21:23 -06005698 }
locke-lunarg96dc9632020-06-10 17:22:18 -06005699
5700 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
5701 pipeline->fragmentShader_writable_output_location_list = CollectWritableOutputLocationinFS(*module, *pStage);
5702 }
locke-lunargd556cc32019-09-17 01:21:23 -06005703}
5704
5705void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
5706 if (cb_state == nullptr) {
5707 return;
5708 }
5709
5710 const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
5711 if (pipeline_layout_state == nullptr) {
5712 return;
5713 }
5714
5715 if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
5716 cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
5717 cb_state->push_constant_data.clear();
5718 uint32_t size_needed = 0;
5719 for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
5720 size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
5721 }
5722 cb_state->push_constant_data.resize(size_needed, 0);
5723 }
5724}
John Zulauf22b0fbe2019-10-15 06:26:16 -06005725
5726void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
5727 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
5728 VkResult result) {
5729 if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
5730 auto swapchain_state = GetSwapchainState(swapchain);
5731
5732 if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);
5733
5734 if (pSwapchainImages) {
5735 if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
5736 swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
5737 }
5738 for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
locke-lunargb3584732019-10-28 20:18:36 -06005739 if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue; // Already retrieved this.
John Zulauf22b0fbe2019-10-15 06:26:16 -06005740
5741 // Add imageMap entries for each swapchain image
5742 VkImageCreateInfo image_ci;
5743 image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -06005744 image_ci.pNext = nullptr; // to be set later
5745 image_ci.flags = 0; // to be updated below
John Zulauf22b0fbe2019-10-15 06:26:16 -06005746 image_ci.imageType = VK_IMAGE_TYPE_2D;
5747 image_ci.format = swapchain_state->createInfo.imageFormat;
5748 image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
5749 image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
5750 image_ci.extent.depth = 1;
5751 image_ci.mipLevels = 1;
5752 image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
5753 image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
5754 image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
5755 image_ci.usage = swapchain_state->createInfo.imageUsage;
5756 image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
5757 image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
5758 image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
5759 image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
5760
5761 image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);
5762
5763 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
5764 image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
5765 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
5766 image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
5767 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
5768 image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);
5769
locke-lunarg296a3c92020-03-25 01:04:29 -06005770 imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005771 auto &image_state = imageMap[pSwapchainImages[i]];
5772 image_state->valid = false;
5773 image_state->create_from_swapchain = swapchain;
5774 image_state->bind_swapchain = swapchain;
5775 image_state->bind_swapchain_imageIndex = i;
Tony-LunarGe64e4fe2020-02-17 16:21:55 -07005776 image_state->is_swapchain_image = true;
locke-lunargb3584732019-10-28 20:18:36 -06005777 swapchain_state->images[i].image = pSwapchainImages[i];
5778 swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
Petr Kraus44f1c482020-04-25 20:09:25 +02005779
5780 AddImageStateProps(*image_state, device, physical_device);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005781 }
5782 }
5783
5784 if (*pSwapchainImageCount) {
5785 if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
5786 swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
5787 }
5788 swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
5789 }
5790}
sourav parmar35e7a002020-06-09 17:58:44 -07005791
5792void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureKHR(
5793 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
5794 const VkAccelerationStructureBuildOffsetInfoKHR *const *ppOffsetInfos) {
5795 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5796 if (cb_state == nullptr) {
5797 return;
5798 }
5799 for (uint32_t i = 0; i < infoCount; ++i) {
5800 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfos[i].dstAccelerationStructure);
5801 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfos[i].srcAccelerationStructure);
5802 if (dst_as_state != nullptr) {
5803 dst_as_state->built = true;
5804 dst_as_state->build_info_khr.initialize(pInfos);
5805 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
5806 }
5807 if (src_as_state != nullptr) {
5808 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
5809 }
5810 }
5811 cb_state->hasBuildAccelerationStructureCmd = true;
5812}
5813
5814void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
5815 const VkCopyAccelerationStructureInfoKHR *pInfo) {
5816 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5817 if (cb_state) {
5818 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfo->src);
5819 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfo->dst);
5820 if (dst_as_state != nullptr && src_as_state != nullptr) {
5821 dst_as_state->built = true;
5822 dst_as_state->build_info_khr = src_as_state->build_info_khr;
5823 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
5824 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
5825 }
5826 }
5827}
Piers Daniell39842ee2020-07-10 16:42:33 -06005828
5829void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
5830 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5831 cb_state->status |= CBSTATUS_CULL_MODE_SET;
5832 cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
5833}
5834
5835void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
5836 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5837 cb_state->status |= CBSTATUS_FRONT_FACE_SET;
5838 cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
5839}
5840
5841void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
5842 VkPrimitiveTopology primitiveTopology) {
5843 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5844 cb_state->primitiveTopology = primitiveTopology;
5845 cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5846 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5847}
5848
5849void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
5850 const VkViewport *pViewports) {
5851 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5852 cb_state->viewportWithCountMask |= (1u << viewportCount) - 1u;
5853 cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
5854 cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
5855}
5856
5857void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
5858 const VkRect2D *pScissors) {
5859 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5860 cb_state->scissorWithCountMask |= (1u << scissorCount) - 1u;
5861 cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
5862 cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
5863}
5864
5865void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
5866 uint32_t bindingCount, const VkBuffer *pBuffers,
5867 const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
5868 const VkDeviceSize *pStrides) {
5869 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5870 if (pStrides) {
5871 cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
5872 cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
5873 }
5874
5875 uint32_t end = firstBinding + bindingCount;
5876 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
5877 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
5878 }
5879
5880 for (uint32_t i = 0; i < bindingCount; ++i) {
5881 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
5882 vertex_buffer_binding.buffer = pBuffers[i];
5883 vertex_buffer_binding.offset = pOffsets[i];
5884 vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
5885 vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
5886 // Add binding for this vertex buffer to this commandbuffer
5887 if (pBuffers[i]) {
5888 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
5889 }
5890 }
5891}
5892
5893void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
5894 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5895 cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
5896 cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
5897}
5898
5899void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
5900 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5901 cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5902 cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5903}
5904
5905void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
5906 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5907 cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
5908 cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
5909}
5910
5911void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
5912 VkBool32 depthBoundsTestEnable) {
5913 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5914 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5915 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5916}
5917void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
5918 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5919 cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
5920 cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
5921}
5922
5923void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
5924 VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
5925 VkCompareOp compareOp) {
5926 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5927 cb_state->status |= CBSTATUS_STENCIL_OP_SET;
5928 cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
5929}