blob: dd5808580b4582777bb2914a5d03e2d7151452f6 [file] [log] [blame]
Tony-LunarG73719992020-01-15 10:20:28 -07001/* Copyright (c) 2015-2020 The Khronos Group Inc.
2 * Copyright (c) 2015-2020 Valve Corporation
3 * Copyright (c) 2015-2020 LunarG, Inc.
4 * Copyright (C) 2015-2020 Google Inc.
locke-lunargd556cc32019-09-17 01:21:23 -06005 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * Author: Mark Lobodzinski <mark@lunarg.com>
19 * Author: Dave Houlton <daveh@lunarg.com>
20 * Shannon McPherson <shannon@lunarg.com>
21 */
22
locke-lunargd556cc32019-09-17 01:21:23 -060023#include <cmath>
24#include <set>
locke-lunargd556cc32019-09-17 01:21:23 -060025
26#include "vk_enum_string_helper.h"
27#include "vk_format_utils.h"
28#include "vk_layer_data.h"
29#include "vk_layer_utils.h"
30#include "vk_layer_logging.h"
31#include "vk_typemap_helper.h"
32
33#include "chassis.h"
34#include "state_tracker.h"
35#include "shader_validation.h"
36
// Return the printable command name (e.g. "vkCmdDraw") for a CMD_TYPE enum value.
// The backing table is generated, so the lookup is a direct index with no bounds check;
// callers are expected to pass a valid CMD_TYPE < CMD_RANGE_SIZE.
const char *CommandTypeString(CMD_TYPE type) {
    // Autogenerated as part of the vk_validation_error_message.h codegen
    static const std::array<const char *, CMD_RANGE_SIZE> command_name_list = {{VUID_CMD_NAME_LIST}};
    return command_name_list[type];
}
42
// Device-level initialization hook: cache a pointer to the instance-level state
// tracker (so device state can reach instance state), then run the base-class
// initialization. When add_obj is false this is a no-op.
void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        // Locate our instance-level counterpart in the instance's dispatch list
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}
50
John Zulauf5c5e88d2019-12-26 11:22:02 -070051uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
52 // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
53 uint32_t mip_level_count = range->levelCount;
54 if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
55 mip_level_count = mip_levels - range->baseMipLevel;
56 }
57 return mip_level_count;
58}
59
60uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
61 // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
62 uint32_t array_layer_count = range->layerCount;
63 if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
64 array_layer_count = layers - range->baseArrayLayer;
65 }
66 return array_layer_count;
67}
68
69VkImageSubresourceRange NormalizeSubresourceRange(const VkImageCreateInfo &image_create_info,
70 const VkImageSubresourceRange &range) {
71 VkImageSubresourceRange norm = range;
72 norm.levelCount = ResolveRemainingLevels(&range, image_create_info.mipLevels);
73
74 // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR flag bit, where <extent.depth> and
75 // <arrayLayers> can potentially alias.
76 uint32_t layer_limit = (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR))
77 ? image_create_info.extent.depth
78 : image_create_info.arrayLayers;
79 norm.layerCount = ResolveRemainingLayers(&range, layer_limit);
80
81 // For multiplanar formats, IMAGE_ASPECT_COLOR is equivalent to adding the aspect of the individual planes
82 VkImageAspectFlags &aspect_mask = norm.aspectMask;
83 if (FormatIsMultiplane(image_create_info.format)) {
84 if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
85 aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
86 aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
87 if (FormatPlaneCount(image_create_info.format) > 2) {
88 aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
89 }
90 }
91 }
92 return norm;
93}
94
95VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range) {
96 const VkImageCreateInfo &image_create_info = image_state.createInfo;
97 return NormalizeSubresourceRange(image_create_info, range);
98}
99
John Zulauf2bc1fde2020-04-24 15:09:51 -0600100// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
101// attachments won't persist past the API entry point exit.
102std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
103 const FRAMEBUFFER_STATE &fb_state) {
104 const VkImageView *attachments = fb_state.createInfo.pAttachments;
105 uint32_t count = fb_state.createInfo.attachmentCount;
106 if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
107 const auto *framebuffer_attachments = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
108 if (framebuffer_attachments) {
109 attachments = framebuffer_attachments->pAttachments;
110 count = framebuffer_attachments->attachmentCount;
111 }
112 }
113 return std::make_pair(count, attachments);
114}
115
116std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetAttachmentViews(const VkRenderPassBeginInfo &rp_begin,
117 const FRAMEBUFFER_STATE &fb_state) const {
118 std::vector<const IMAGE_VIEW_STATE *> views;
119
120 const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
121 const auto attachment_count = count_attachment.first;
122 const auto *attachments = count_attachment.second;
123 views.resize(attachment_count, nullptr);
124 for (uint32_t i = 0; i < attachment_count; i++) {
125 if (attachments[i] != VK_NULL_HANDLE) {
126 views[i] = Get<IMAGE_VIEW_STATE>(attachments[i]);
127 }
128 }
129 return views;
130}
131
132std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetCurrentAttachmentViews(const CMD_BUFFER_STATE &cb_state) const {
133 // Only valid *after* RecordBeginRenderPass and *before* RecordEndRenderpass as it relies on cb_state for the renderpass info.
134 std::vector<const IMAGE_VIEW_STATE *> views;
135
locke-lunargaecf2152020-05-12 17:15:41 -0600136 const auto *rp_state = cb_state.activeRenderPass.get();
John Zulauf2bc1fde2020-04-24 15:09:51 -0600137 if (!rp_state) return views;
138 const auto &rp_begin = *cb_state.activeRenderPassBeginInfo.ptr();
139 const auto *fb_state = Get<FRAMEBUFFER_STATE>(rp_begin.framebuffer);
140 if (!fb_state) return views;
141
142 return GetAttachmentViews(rp_begin, *fb_state);
143}
144
locke-lunarg3e127c72020-06-09 17:45:28 -0600145PIPELINE_STATE *GetCurrentPipelineFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint) {
146 const auto last_bound_it = cmd.lastBound.find(pipelineBindPoint);
147 if (last_bound_it == cmd.lastBound.cend()) {
148 return nullptr;
149 }
150 return last_bound_it->second.pipeline_state;
151}
152
// Fetch both the bound pipeline and the per-set descriptor bindings for a bind point.
// NOTE: the output pointers are left UNTOUCHED when the bind point has no entry —
// callers must pre-initialize *rtn_pipe / *rtn_sets before calling.
// (Function name typo "Desriptor" is part of the public interface; renaming would break callers.)
void GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint,
                                                         const PIPELINE_STATE **rtn_pipe,
                                                         const std::vector<LAST_BOUND_STATE::PER_SET> **rtn_sets) {
    const auto last_bound_it = cmd.lastBound.find(pipelineBindPoint);
    if (last_bound_it == cmd.lastBound.cend()) {
        return;
    }
    *rtn_pipe = last_bound_it->second.pipeline_state;
    *rtn_sets = &(last_bound_it->second.per_set);
}
163
locke-lunargd556cc32019-09-17 01:21:23 -0600164#ifdef VK_USE_PLATFORM_ANDROID_KHR
165// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
166// This could also move into a seperate core_validation_android.cpp file... ?
167
// Record Android-hardware-buffer-related state on a newly created image:
// whether it is externally backed by an AHB, and any external (AHB) format info.
void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        // Memory requirements can't be queried until the AHB memory is bound
        is_node->external_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
        // Format features were cached when the AHB's properties were queried;
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            is_node->format_features = it->second;
        }
    }
}
184
// Mark a newly created buffer as externally backed by an Android hardware buffer
// (its memory requirements can't be queried until memory is bound).
void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
    const VkExternalMemoryBufferCreateInfo *embci = lvl_find_in_chain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
    if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        bs_node->external_ahb = true;
    }
}
191
locke-lunargd556cc32019-09-17 01:21:23 -0600192void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700193 VkSamplerYcbcrConversion ycbcr_conversion,
194 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
locke-lunargd556cc32019-09-17 01:21:23 -0600195 const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
196 if (ext_format_android && (0 != ext_format_android->externalFormat)) {
197 ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700198 // VUID 01894 will catch if not found in map
199 auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
200 if (it != ahb_ext_formats_map.end()) {
201 ycbcr_state->format_features = it->second;
202 }
locke-lunargd556cc32019-09-17 01:21:23 -0600203 }
204};
205
206void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
207 ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
208};
209
// Cache the format features reported for an AHB external format so later
// image/sampler creation against that format can look them up.
void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
    if (ahb_format_props) {
        // NOTE(review): insert() keeps an existing entry — features for an already-seen
        // external format are not refreshed; presumably they are stable per format.
        ahb_ext_formats_map.insert({ahb_format_props->externalFormat, ahb_format_props->formatFeatures});
    }
}
218
locke-lunargd556cc32019-09-17 01:21:23 -0600219#else
220
// Non-Android build: AHB image tracking is a no-op.
void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
222
// Non-Android build: AHB buffer tracking is a no-op.
void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}
224
// Non-Android build: AHB Y'CbCr conversion tracking is a no-op.
void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state){};
locke-lunargd556cc32019-09-17 01:21:23 -0600228
// Non-Android build: AHB Y'CbCr conversion cleanup is a no-op.
void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
230
231#endif // VK_USE_PLATFORM_ANDROID_KHR
232
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -0600233std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
234 uint32_t set) {
235 std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
236 if (layout_data && (set < layout_data->set_layouts.size())) {
237 dsl = layout_data->set_layouts[set];
238 }
239 return dsl;
240}
241
Petr Kraus44f1c482020-04-25 20:09:25 +0200242void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
243 // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
244 // if format is AHB external format then the features are already set
245 if (image_state.has_ahb_format == false) {
246 const VkImageTiling image_tiling = image_state.createInfo.tiling;
247 const VkFormat image_format = image_state.createInfo.format;
248 if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
249 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
250 VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
251 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image, &drm_format_properties);
252
253 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
254 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
255 nullptr};
256 format_properties_2.pNext = (void *)&drm_properties_list;
257 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Lionel Landwerlin09351a72020-06-22 18:15:59 +0300258 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
259 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
260 drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
261 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Petr Kraus44f1c482020-04-25 20:09:25 +0200262
263 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300264 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
265 drm_format_properties.drmFormatModifier) {
266 image_state.format_features =
Petr Kraus44f1c482020-04-25 20:09:25 +0200267 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300268 break;
Petr Kraus44f1c482020-04-25 20:09:25 +0200269 }
270 }
271 } else {
272 VkFormatProperties format_properties;
273 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
274 image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
275 : format_properties.optimalTilingFeatures;
276 }
277 }
278}
279
// Create and register tracking state for a newly created VkImage: AHB/swapchain
// linkage, cached memory requirements (per plane for disjoint images), format
// features, and the protected-memory flag.
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            // Disjoint multi-planar images have independent memory requirements per plane
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least first plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    // Cache format feature flags (skipped internally for AHB external formats)
    AddImageStateProps(*is_node, device, physical_device);

    is_node->unprotected = ((pCreateInfo->flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);

    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}
330
// Tear down tracking state for an image about to be destroyed: invalidate
// referencing command buffers, unhook memory and swapchain bookkeeping, drop
// aliasing links, and remove the image from the map.
void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    // Any command buffer that recorded a use of this image is no longer valid to submit
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image, mem_binding);
    }
    if (image_state->bind_swapchain) {
        // Swapchain-bound images are also tracked on the swapchain's per-image entry
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    // Mark destroyed so outstanding shared_ptr holders can detect staleness
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}
352
353void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
354 VkImageLayout imageLayout, const VkClearColorValue *pColor,
355 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
356 auto cb_node = GetCBState(commandBuffer);
357 auto image_state = GetImageState(image);
358 if (cb_node && image_state) {
359 AddCommandBufferBindingImage(cb_node, image_state);
360 }
361}
362
363void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
364 VkImageLayout imageLayout,
365 const VkClearDepthStencilValue *pDepthStencil,
366 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
367 auto cb_node = GetCBState(commandBuffer);
368 auto image_state = GetImageState(image);
369 if (cb_node && image_state) {
370 AddCommandBufferBindingImage(cb_node, image_state);
371 }
372}
373
374void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
375 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
376 uint32_t regionCount, const VkImageCopy *pRegions) {
377 auto cb_node = GetCBState(commandBuffer);
378 auto src_image_state = GetImageState(srcImage);
379 auto dst_image_state = GetImageState(dstImage);
380
381 // Update bindings between images and cmd buffer
382 AddCommandBufferBindingImage(cb_node, src_image_state);
383 AddCommandBufferBindingImage(cb_node, dst_image_state);
384}
385
386void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
387 VkImageLayout srcImageLayout, VkImage dstImage,
388 VkImageLayout dstImageLayout, uint32_t regionCount,
389 const VkImageResolve *pRegions) {
390 auto cb_node = GetCBState(commandBuffer);
391 auto src_image_state = GetImageState(srcImage);
392 auto dst_image_state = GetImageState(dstImage);
393
394 // Update bindings between images and cmd buffer
395 AddCommandBufferBindingImage(cb_node, src_image_state);
396 AddCommandBufferBindingImage(cb_node, dst_image_state);
397}
398
399void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
400 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
401 uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
402 auto cb_node = GetCBState(commandBuffer);
403 auto src_image_state = GetImageState(srcImage);
404 auto dst_image_state = GetImageState(dstImage);
405
406 // Update bindings between images and cmd buffer
407 AddCommandBufferBindingImage(cb_node, src_image_state);
408 AddCommandBufferBindingImage(cb_node, dst_image_state);
409}
410
// Create and register tracking state for a newly created VkBuffer, caching its
// memory requirements and the protected-memory flag.
void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
    }
    // Get a set of requirements in the case the app does not
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    buffer_state->unprotected = ((pCreateInfo->flags & VK_BUFFER_CREATE_PROTECTED_BIT) == 0);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}
428
429void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
430 const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
431 VkResult result) {
432 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500433 auto buffer_state = GetBufferShared(pCreateInfo->buffer);
locke-lunarg25b6c352020-08-06 17:44:18 -0600434 auto buffer_view_state = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
435
436 VkFormatProperties format_properties;
437 DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);
438 buffer_view_state->format_features = format_properties.bufferFeatures;
439
440 bufferViewMap.insert(std::make_pair(*pView, std::move(buffer_view_state)));
locke-lunargd556cc32019-09-17 01:21:23 -0600441}
442
443void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
444 const VkAllocationCallbacks *pAllocator, VkImageView *pView,
445 VkResult result) {
446 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500447 auto image_state = GetImageShared(pCreateInfo->image);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700448 auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
449
450 // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
451 const VkImageTiling image_tiling = image_state->createInfo.tiling;
452 const VkFormat image_view_format = pCreateInfo->format;
453 if (image_state->has_ahb_format == true) {
454 // The ImageView uses same Image's format feature since they share same AHB
455 image_view_state->format_features = image_state->format_features;
456 } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
457 // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
458 assert(device_extensions.vk_ext_image_drm_format_modifier);
459 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
460 nullptr};
461 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);
462
463 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
464 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
465 nullptr};
466 format_properties_2.pNext = (void *)&drm_properties_list;
467 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
468
469 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300470 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700471 image_view_state->format_features |=
472 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300473 break;
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700474 }
475 }
476 } else {
477 VkFormatProperties format_properties;
478 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
479 image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
480 : format_properties.optimalTilingFeatures;
481 }
482
483 imageViewMap.insert(std::make_pair(*pView, std::move(image_view_state)));
locke-lunargd556cc32019-09-17 01:21:23 -0600484}
485
486void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
487 uint32_t regionCount, const VkBufferCopy *pRegions) {
488 auto cb_node = GetCBState(commandBuffer);
489 auto src_buffer_state = GetBufferState(srcBuffer);
490 auto dst_buffer_state = GetBufferState(dstBuffer);
491
492 // Update bindings between buffers and cmd buffer
493 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
494 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
495}
496
// Tear down tracking state for an image view being destroyed: invalidate
// referencing command buffers, mark the state destroyed, and drop it from the map.
void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    // Mark destroyed so outstanding shared_ptr holders can detect staleness
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}
508
// Tear down tracking state for a buffer being destroyed: invalidate referencing
// command buffers, unhook its memory range bookkeeping, and remove it from the map.
void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    // Any command buffer that recorded a use of this buffer is no longer valid to submit
    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    // Remove this buffer's range from every memory object it is bound to
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        RemoveBufferMemoryRange(buffer, mem_binding);
    }
    ClearMemoryObjectBindings(obj_struct);
    // Mark destroyed so outstanding shared_ptr holders can detect staleness
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}
522
// Tear down tracking state for a buffer view being destroyed: invalidate
// referencing command buffers, mark the state destroyed, and drop it from the map.
void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    // Mark destroyed so outstanding shared_ptr holders can detect staleness
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}
534
535void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
536 VkDeviceSize size, uint32_t data) {
537 auto cb_node = GetCBState(commandBuffer);
538 auto buffer_state = GetBufferState(dstBuffer);
539 // Update bindings between buffer and cmd buffer
540 AddCommandBufferBindingBuffer(cb_node, buffer_state);
541}
542
543void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
544 VkImageLayout srcImageLayout, VkBuffer dstBuffer,
545 uint32_t regionCount, const VkBufferImageCopy *pRegions) {
546 auto cb_node = GetCBState(commandBuffer);
547 auto src_image_state = GetImageState(srcImage);
548 auto dst_buffer_state = GetBufferState(dstBuffer);
549
550 // Update bindings between buffer/image and cmd buffer
551 AddCommandBufferBindingImage(cb_node, src_image_state);
552 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
553}
554
555void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
556 VkImageLayout dstImageLayout, uint32_t regionCount,
557 const VkBufferImageCopy *pRegions) {
558 auto cb_node = GetCBState(commandBuffer);
559 auto src_buffer_state = GetBufferState(srcBuffer);
560 auto dst_image_state = GetImageState(dstImage);
561
562 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
563 AddCommandBufferBindingImage(cb_node, dst_image_state);
564}
565
566// Get the image viewstate for a given framebuffer attachment
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300567IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(CMD_BUFFER_STATE *cb, FRAMEBUFFER_STATE *framebuffer,
568 uint32_t index) {
569 if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
570 assert(index < cb->imagelessFramebufferAttachments.size());
571 return cb->imagelessFramebufferAttachments[index];
572 }
locke-lunargd556cc32019-09-17 01:21:23 -0600573 assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
574 const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
575 return GetImageViewState(image_view);
576}
577
578// Get the image viewstate for a given framebuffer attachment
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300579const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const CMD_BUFFER_STATE *cb,
580 const FRAMEBUFFER_STATE *framebuffer,
locke-lunargd556cc32019-09-17 01:21:23 -0600581 uint32_t index) const {
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300582 if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
583 assert(index < cb->imagelessFramebufferAttachments.size());
584 return cb->imagelessFramebufferAttachments[index];
585 }
locke-lunargd556cc32019-09-17 01:21:23 -0600586 assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
587 const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
588 return GetImageViewState(image_view);
589}
590
591void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
locke-lunargd556cc32019-09-17 01:21:23 -0600592 std::unordered_set<VkImage> *bound_images = nullptr;
593
locke-lunargb3584732019-10-28 20:18:36 -0600594 if (image_state->bind_swapchain) {
595 auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -0600596 if (swapchain_state) {
locke-lunargb3584732019-10-28 20:18:36 -0600597 bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
locke-lunargd556cc32019-09-17 01:21:23 -0600598 }
599 } else {
locke-lunargcf04d582019-11-26 00:31:50 -0700600 if (image_state->binding.mem_state) {
601 bound_images = &image_state->binding.mem_state->bound_images;
locke-lunargd556cc32019-09-17 01:21:23 -0600602 }
603 }
604
605 if (bound_images) {
606 for (const auto &handle : *bound_images) {
607 if (handle != image_state->image) {
608 auto is = GetImageState(handle);
609 if (is && is->IsCompatibleAliasing(image_state)) {
610 auto inserted = is->aliasing_images.emplace(image_state->image);
611 if (inserted.second) {
612 image_state->aliasing_images.emplace(handle);
613 }
614 }
615 }
616 }
617 }
618}
619
620void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
621 for (const auto &image : image_state->aliasing_images) {
622 auto is = GetImageState(image);
623 if (is) {
624 is->aliasing_images.erase(image_state->image);
625 }
626 }
627 image_state->aliasing_images.clear();
628}
629
630void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
631 // This is one way clear. Because the bound_images include cross references, the one way clear loop could clear the whole
632 // reference. It doesn't need two ways clear.
633 for (const auto &handle : bound_images) {
634 auto is = GetImageState(handle);
635 if (is) {
636 is->aliasing_images.clear();
637 }
638 }
639}
640
Jeff Bolz310775c2019-10-09 00:46:33 -0500641const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
642 auto it = eventMap.find(event);
643 if (it == eventMap.end()) {
644 return nullptr;
645 }
646 return &it->second;
647}
648
locke-lunargd556cc32019-09-17 01:21:23 -0600649EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
650 auto it = eventMap.find(event);
651 if (it == eventMap.end()) {
652 return nullptr;
653 }
654 return &it->second;
655}
656
657const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
658 auto it = queueMap.find(queue);
659 if (it == queueMap.cend()) {
660 return nullptr;
661 }
662 return &it->second;
663}
664
665QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
666 auto it = queueMap.find(queue);
667 if (it == queueMap.end()) {
668 return nullptr;
669 }
670 return &it->second;
671}
672
673const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
674 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
675 auto it = phys_dev_map->find(phys);
676 if (it == phys_dev_map->end()) {
677 return nullptr;
678 }
679 return &it->second;
680}
681
682PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
683 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
684 auto it = phys_dev_map->find(phys);
685 if (it == phys_dev_map->end()) {
686 return nullptr;
687 }
688 return &it->second;
689}
690
// Accessors for this tracker's own physical-device state member (no lookup performed)
PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
693
694// Return ptr to memory binding for given handle of specified type
695template <typename State, typename Result>
696static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
697 switch (typed_handle.type) {
698 case kVulkanObjectTypeImage:
699 return state->GetImageState(typed_handle.Cast<VkImage>());
700 case kVulkanObjectTypeBuffer:
701 return state->GetBufferState(typed_handle.Cast<VkBuffer>());
702 case kVulkanObjectTypeAccelerationStructureNV:
703 return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
704 default:
705 break;
706 }
707 return nullptr;
708}
709
// Resolve a typed handle (image/buffer/acceleration structure) to its BINDABLE state; const overload
const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}
713
// Resolve a typed handle (image/buffer/acceleration structure) to its BINDABLE state; mutable overload
BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}
717
// Create and populate DEVICE_MEMORY_STATE for a newly allocated VkDeviceMemory.
// Walks pAllocateInfo->pNext to record dedicated-allocation, export, multi-instance,
// import, and protected-memory properties on the new state object.
void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    // Assign a fake address range so aliasing/overlap validation can reason about offsets
    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo, fake_address);
    auto mem_info = memObjMap[mem].get();

    // Dedicated allocation: remember which buffer/image this memory is dedicated to
    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }

    // Multi-instance if the allocation targets more than one device in the group
    // (deviceMask has more than one bit set) ...
    auto alloc_flags = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
    if (alloc_flags) {
        auto dev_mask = alloc_flags->deviceMask;
        if ((dev_mask != 0) && (dev_mask & (dev_mask - 1))) {
            mem_info->multi_instance = true;
        }
    }
    // ... or if the heap itself is multi-instance and there are multiple physical devices
    auto heap_index = phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].heapIndex;
    mem_info->multi_instance |= (((phys_dev_mem_props.memoryHeaps[heap_index].flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT) != 0) &&
                                 physical_device_count > 1);

    // Assumes validation already for only a single import operation in the pNext
#ifdef VK_USE_PLATFORM_WIN32_KHR
    auto win32_import = lvl_find_in_chain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
    if (win32_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = win32_import->handleType;
    }
#endif
    auto fd_import = lvl_find_in_chain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
    if (fd_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = fd_import->handleType;
    }
    auto host_pointer_import = lvl_find_in_chain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
    if (host_pointer_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = host_pointer_import->handleType;
    }
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    // AHB Import doesn't have handle in the pNext struct
    // It should be assumed that all imported AHB can only have the same, single handleType
    auto ahb_import = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
    if ((ahb_import) && (ahb_import->buffer != nullptr)) {
        mem_info->is_import_ahb = true;
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
#endif

    // Protected memory cannot be mapped/accessed by the host
    const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    mem_info->unprotected = ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) == 0);
}
780
781// Create binding link between given sampler and command buffer node
782void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600783 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600784 return;
785 }
Jeff Bolzadbfa852019-10-04 13:53:30 -0500786 AddCommandBufferBinding(sampler_state->cb_bindings,
787 VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600788}
789
790// Create binding link between given image node and command buffer node
791void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600792 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600793 return;
794 }
795 // Skip validation if this image was created through WSI
796 if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
797 // First update cb binding for image
Jeff Bolzadbfa852019-10-04 13:53:30 -0500798 if (AddCommandBufferBinding(image_state->cb_bindings,
799 VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600800 // Now update CB binding in MemObj mini CB list
801 for (auto mem_binding : image_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700802 // Now update CBInfo's Mem reference list
803 AddCommandBufferBinding(mem_binding->cb_bindings,
804 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600805 }
806 }
807 }
808}
809
810// Create binding link between given image view node and its image with command buffer node
811void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600812 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600813 return;
814 }
815 // First add bindings for imageView
Jeff Bolzadbfa852019-10-04 13:53:30 -0500816 if (AddCommandBufferBinding(view_state->cb_bindings,
817 VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600818 // Only need to continue if this is a new item
Jeff Bolzadbfa852019-10-04 13:53:30 -0500819 auto image_state = view_state->image_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -0600820 // Add bindings for image within imageView
821 if (image_state) {
822 AddCommandBufferBindingImage(cb_node, image_state);
823 }
824 }
825}
826
827// Create binding link between given buffer node and command buffer node
828void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600829 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600830 return;
831 }
832 // First update cb binding for buffer
Jeff Bolzadbfa852019-10-04 13:53:30 -0500833 if (AddCommandBufferBinding(buffer_state->cb_bindings,
834 VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600835 // Now update CB binding in MemObj mini CB list
836 for (auto mem_binding : buffer_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700837 // Now update CBInfo's Mem reference list
838 AddCommandBufferBinding(mem_binding->cb_bindings,
839 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600840 }
841 }
842}
843
844// Create binding link between given buffer view node and its buffer with command buffer node
845void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600846 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600847 return;
848 }
849 // First add bindings for bufferView
Jeff Bolzadbfa852019-10-04 13:53:30 -0500850 if (AddCommandBufferBinding(view_state->cb_bindings,
851 VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
852 auto buffer_state = view_state->buffer_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -0600853 // Add bindings for buffer within bufferView
854 if (buffer_state) {
855 AddCommandBufferBindingBuffer(cb_node, buffer_state);
856 }
857 }
858}
859
860// Create binding link between given acceleration structure and command buffer node
861void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
862 ACCELERATION_STRUCTURE_STATE *as_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600863 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600864 return;
865 }
Jeff Bolzadbfa852019-10-04 13:53:30 -0500866 if (AddCommandBufferBinding(
867 as_state->cb_bindings,
868 VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600869 // Now update CB binding in MemObj mini CB list
870 for (auto mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700871 // Now update CBInfo's Mem reference list
872 AddCommandBufferBinding(mem_binding->cb_bindings,
873 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600874 }
875 }
876}
877
locke-lunargd556cc32019-09-17 01:21:23 -0600878// Clear a single object binding from given memory object
locke-lunarg5f59e782019-12-19 10:32:23 -0700879void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
locke-lunargd556cc32019-09-17 01:21:23 -0600880 // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
881 if (mem_info) {
882 mem_info->obj_bindings.erase(typed_handle);
883 }
884}
885
886// ClearMemoryObjectBindings clears the binding of objects to memory
887// For the given object it pulls the memory bindings and makes sure that the bindings
888// no longer refer to the object being cleared. This occurs when objects are destroyed.
889void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
890 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
891 if (mem_binding) {
892 if (!mem_binding->sparse) {
locke-lunarg5f59e782019-12-19 10:32:23 -0700893 ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -0600894 } else { // Sparse, clear all bindings
895 for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
locke-lunarg5f59e782019-12-19 10:32:23 -0700896 ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -0600897 }
898 }
899 }
900}
901
// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
// A VK_NULL_HANDLE mem leaves the binding untouched (the early checks simply skip everything).
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        // Resolve the memory handle; unknown/destroyed handles yield a null state and the bind is skipped
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            // Bound size is taken from the object's own memory requirements
            mem_binding->binding.size = mem_binding->requirements.size;
            // Record the reverse link: memory object -> bound resource
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}
929
// For NULL mem case, clear any previous binding Else...
// Make sure given object is in its object map
// IF a previous binding existed, update binding
// Add reference from objectInfo to memoryInfo
// Add reference off of object's binding info
// Return VK_TRUE if addition is successful, VK_FALSE otherwise
// NOTE(review): as written, 'skip' is never assigned after initialization, so this
// always returns VK_FALSE regardless of outcome — confirm whether callers rely on it.
bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
                                                 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
    bool skip = VK_FALSE;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            // Build the new sparse binding record (shared memory state + offset/size)
            MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
            if (binding.mem_state) {
                // Record the reverse link: memory object -> bound resource
                binding.mem_state->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}
958
// Record the state side effects of a draw/dispatch/trace command: for every descriptor set
// active in the bound pipeline, update the command buffer's validated-set cache and forward
// the (possibly reduced) binding requirements to the descriptor set's own UpdateDrawState.
// The caching exists to avoid re-recording bindings for large "bindless" style sets.
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
                                             const char *function) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profilinging, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            // Small sets (non-"many") are always treated as changed and re-recorded in full.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[setIndex].validated_set != descriptor_set ||
                state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                              state.per_set[setIndex].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    // (set difference of new requirements minus previously validated ones)
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                        state.per_set[setIndex].validated_set_binding_req_map.end(),
                                        std::inserter(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, delta_reqs, function);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, binding_req_map, function);
                }

                // Refresh the cache tags so the next draw with unchanged state can skip this work
                state.per_set[setIndex].validated_set = descriptor_set;
                state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}
1029
1030// Remove set from setMap and delete the set
1031void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001032 descriptor_set->destroyed = true;
Jeff Bolzadbfa852019-10-04 13:53:30 -05001033 const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001034 // Any bound cmd buffers are now invalid
Jeff Bolzadbfa852019-10-04 13:53:30 -05001035 InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001036
locke-lunargd556cc32019-09-17 01:21:23 -06001037 setMap.erase(descriptor_set->GetSet());
1038}
1039
1040// Free all DS Pools including their Sets & related sub-structs
1041// NOTE : Calls to this function should be wrapped in mutex
1042void ValidationStateTracker::DeleteDescriptorSetPools() {
1043 for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
1044 // Remove this pools' sets from setMap and delete them
1045 for (auto ds : ii->second->sets) {
1046 FreeDescriptorSet(ds);
1047 }
1048 ii->second->sets.clear();
1049 ii = descriptorPoolMap.erase(ii);
1050 }
1051}
1052
1053// For given object struct return a ptr of BASE_NODE type for its wrapping struct
1054BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001055 if (object_struct.node) {
1056#ifdef _DEBUG
1057 // assert that lookup would find the same object
1058 VulkanTypedHandle other = object_struct;
1059 other.node = nullptr;
1060 assert(object_struct.node == GetStateStructPtrFromObject(other));
1061#endif
1062 return object_struct.node;
1063 }
locke-lunargd556cc32019-09-17 01:21:23 -06001064 BASE_NODE *base_ptr = nullptr;
1065 switch (object_struct.type) {
1066 case kVulkanObjectTypeDescriptorSet: {
1067 base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
1068 break;
1069 }
1070 case kVulkanObjectTypeSampler: {
1071 base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
1072 break;
1073 }
1074 case kVulkanObjectTypeQueryPool: {
1075 base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
1076 break;
1077 }
1078 case kVulkanObjectTypePipeline: {
1079 base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
1080 break;
1081 }
1082 case kVulkanObjectTypeBuffer: {
1083 base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
1084 break;
1085 }
1086 case kVulkanObjectTypeBufferView: {
1087 base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
1088 break;
1089 }
1090 case kVulkanObjectTypeImage: {
1091 base_ptr = GetImageState(object_struct.Cast<VkImage>());
1092 break;
1093 }
1094 case kVulkanObjectTypeImageView: {
1095 base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
1096 break;
1097 }
1098 case kVulkanObjectTypeEvent: {
1099 base_ptr = GetEventState(object_struct.Cast<VkEvent>());
1100 break;
1101 }
1102 case kVulkanObjectTypeDescriptorPool: {
1103 base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
1104 break;
1105 }
1106 case kVulkanObjectTypeCommandPool: {
1107 base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
1108 break;
1109 }
1110 case kVulkanObjectTypeFramebuffer: {
1111 base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
1112 break;
1113 }
1114 case kVulkanObjectTypeRenderPass: {
1115 base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
1116 break;
1117 }
1118 case kVulkanObjectTypeDeviceMemory: {
1119 base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
1120 break;
1121 }
1122 case kVulkanObjectTypeAccelerationStructureNV: {
1123 base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
1124 break;
1125 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001126 case kVulkanObjectTypeUnknown:
1127 // This can happen if an element of the object_bindings vector has been
1128 // zeroed out, after an object is destroyed.
1129 break;
locke-lunargd556cc32019-09-17 01:21:23 -06001130 default:
1131 // TODO : Any other objects to be handled here?
1132 assert(0);
1133 break;
1134 }
1135 return base_ptr;
1136}
1137
sfricke-samsungbf1a2ed2020-06-14 23:31:00 -07001138// Gets union of all features defined by Potential Format Features
1139// except, does not handle the external format case for AHB as that only can be used for sampled images
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001140VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
1141 VkFormatFeatureFlags format_features = 0;
1142
1143 if (format != VK_FORMAT_UNDEFINED) {
1144 VkFormatProperties format_properties;
1145 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
1146 format_features |= format_properties.linearTilingFeatures;
1147 format_features |= format_properties.optimalTilingFeatures;
1148 if (device_extensions.vk_ext_image_drm_format_modifier) {
1149 // VK_KHR_get_physical_device_properties2 is required in this case
1150 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
1151 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
1152 nullptr};
1153 format_properties_2.pNext = (void *)&drm_properties_list;
1154 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
1155 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
1156 format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
1157 }
1158 }
1159 }
1160
1161 return format_features;
1162}
1163
locke-lunargd556cc32019-09-17 01:21:23 -06001164// Tie the VulkanTypedHandle to the cmd buffer which includes:
1165// Add object_binding to cmd buffer
1166// Add cb_binding to object
Jeff Bolzadbfa852019-10-04 13:53:30 -05001167bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
locke-lunargd556cc32019-09-17 01:21:23 -06001168 const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001169 if (disabled[command_buffer_state]) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001170 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001171 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001172 // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
1173 // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
1174 auto inserted = cb_bindings.insert({cb_node, -1});
1175 if (inserted.second) {
1176 cb_node->object_bindings.push_back(obj);
1177 inserted.first->second = (int)cb_node->object_bindings.size() - 1;
1178 return true;
1179 }
1180 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001181}
1182
1183// For a given object, if cb_node is in that objects cb_bindings, remove cb_node
1184void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
1185 BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
1186 if (base_obj) base_obj->cb_bindings.erase(cb_node);
1187}
1188
// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
// Called for vkResetCommandBuffer / vkBeginCommandBuffer and pool resets; also
// fires the registered command_buffer_reset_callback (even when cb has no state).
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        // Clear per-recording command flags used by draw/dispatch/RT validation
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        // Dynamic-state tracking bits (CBSTATUS_* masks and per-viewport/scissor bitmasks)
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->viewportWithCountMask = 0;
        pCB->scissorMask = 0;
        pCB->scissorWithCountMask = 0;
        pCB->primitiveTopology = VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;

        // Release references held by per-bind-point state (pipelines, descriptor sets, ...)
        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        // Clear active render pass / subpass state
        pCB->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        // Event and query bookkeeping accumulated while recording
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        // NOTE: must happen before linkedCommandBuffers is cleared below.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        // Deferred-validation lambdas queued for submit/execute time
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings (unlink this CB from every bound object's
        // cb_bindings before dropping our side of the association)
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            framebuffer->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        // Queue-family-ownership-transfer barrier tracking
        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data (VK_EXT_debug_utils labels)
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        pCB->small_indexed_draw_call_count = 0;

        pCB->transform_feedback_active = false;
    }
    // Notify any layer (e.g. GPU-AV) that registered interest in CB resets;
    // runs unconditionally, even when no state was found for cb.
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}
1278
1279void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
1280 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
1281 VkResult result) {
1282 if (VK_SUCCESS != result) return;
1283
1284 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
1285 if (nullptr == enabled_features_found) {
1286 const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
1287 if (features2) {
1288 enabled_features_found = &(features2->features);
1289 }
1290 }
1291
1292 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
1293 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
1294 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
1295
1296 if (nullptr == enabled_features_found) {
1297 state_tracker->enabled_features.core = {};
1298 } else {
1299 state_tracker->enabled_features.core = *enabled_features_found;
1300 }
1301
1302 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
1303 // previously set them through an explicit API call.
1304 uint32_t count;
1305 auto pd_state = GetPhysicalDeviceState(gpu);
1306 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
1307 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
1308 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
1309 // Save local link to this device's physical device state
1310 state_tracker->physical_device_state = pd_state;
1311
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001312 const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
1313 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001314 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001315 } else {
sfricke-samsung27c70722020-05-02 08:42:39 -07001316 // Set Extension Feature Aliases to false as there is no struct to check
1317 state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
1318 state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
1319 state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
1320 state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
1321 state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
1322 state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
1323
1324 // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001325
1326 const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
1327 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001328 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1329 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1330 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1331 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001332 }
1333
1334 const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
1335 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001336 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1337 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001338 }
1339
1340 const auto *descriptor_indexing_features =
1341 lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
1342 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001343 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1344 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1345 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1346 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1347 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1348 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1349 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1350 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1351 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1352 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1353 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1354 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1355 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1356 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1357 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1358 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1359 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1360 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1361 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1362 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1363 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1364 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1365 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1366 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1367 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1368 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1369 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1370 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
1371 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
1372 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1373 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1374 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1375 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1376 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1377 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1378 descriptor_indexing_features->descriptorBindingPartiallyBound;
1379 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1380 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1381 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001382 }
1383
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001384 const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001385 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001386 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001387 }
1388
1389 const auto *imageless_framebuffer_features =
1390 lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
1391 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001392 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001393 }
1394
1395 const auto *uniform_buffer_standard_layout_features =
1396 lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
1397 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001398 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1399 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001400 }
1401
1402 const auto *subgroup_extended_types_features =
1403 lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
1404 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001405 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1406 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001407 }
1408
1409 const auto *separate_depth_stencil_layouts_features =
1410 lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
1411 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001412 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1413 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001414 }
1415
1416 const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
1417 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001418 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001419 }
1420
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001421 const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001422 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001423 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001424 }
1425
1426 const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
1427 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001428 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1429 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1430 buffer_device_address->bufferDeviceAddressCaptureReplay;
1431 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1432 buffer_device_address->bufferDeviceAddressMultiDevice;
1433 }
1434 }
1435
1436 const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
1437 if (vulkan_11_features) {
1438 state_tracker->enabled_features.core11 = *vulkan_11_features;
1439 } else {
1440 // These structs are only allowed in pNext chain if there is no kPhysicalDeviceVulkan11Features
1441
1442 const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
1443 if (sixteen_bit_storage_features) {
1444 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1445 sixteen_bit_storage_features->storageBuffer16BitAccess;
1446 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1447 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1448 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1449 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1450 }
1451
1452 const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
1453 if (multiview_features) {
1454 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1455 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1456 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1457 }
1458
1459 const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
1460 if (variable_pointers_features) {
1461 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1462 variable_pointers_features->variablePointersStorageBuffer;
1463 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1464 }
1465
1466 const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
1467 if (protected_memory_features) {
1468 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1469 }
1470
1471 const auto *ycbcr_conversion_features =
1472 lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
1473 if (ycbcr_conversion_features) {
1474 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1475 }
1476
1477 const auto *shader_draw_parameters_features =
1478 lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
1479 if (shader_draw_parameters_features) {
1480 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001481 }
1482 }
1483
locke-lunargd556cc32019-09-17 01:21:23 -06001484 const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001485 if (device_group_ci) {
1486 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1487 state_tracker->device_group_create_info = *device_group_ci;
1488 } else {
1489 state_tracker->physical_device_count = 1;
1490 }
locke-lunargd556cc32019-09-17 01:21:23 -06001491
locke-lunargd556cc32019-09-17 01:21:23 -06001492 const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
1493 if (exclusive_scissor_features) {
1494 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1495 }
1496
1497 const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
1498 if (shading_rate_image_features) {
1499 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1500 }
1501
1502 const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
1503 if (mesh_shader_features) {
1504 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1505 }
1506
1507 const auto *inline_uniform_block_features =
1508 lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
1509 if (inline_uniform_block_features) {
1510 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1511 }
1512
1513 const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
1514 if (transform_feedback_features) {
1515 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1516 }
1517
locke-lunargd556cc32019-09-17 01:21:23 -06001518 const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
1519 if (vtx_attrib_div_features) {
1520 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1521 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001522
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001523 const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
1524 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001525 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001526 }
1527
1528 const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
1529 if (cooperative_matrix_features) {
1530 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1531 }
1532
locke-lunargd556cc32019-09-17 01:21:23 -06001533 const auto *compute_shader_derivatives_features =
1534 lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
1535 if (compute_shader_derivatives_features) {
1536 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1537 }
1538
1539 const auto *fragment_shader_barycentric_features =
1540 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
1541 if (fragment_shader_barycentric_features) {
1542 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1543 }
1544
1545 const auto *shader_image_footprint_features =
1546 lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
1547 if (shader_image_footprint_features) {
1548 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1549 }
1550
1551 const auto *fragment_shader_interlock_features =
1552 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
1553 if (fragment_shader_interlock_features) {
1554 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1555 }
1556
1557 const auto *demote_to_helper_invocation_features =
1558 lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
1559 if (demote_to_helper_invocation_features) {
1560 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1561 }
1562
1563 const auto *texel_buffer_alignment_features =
1564 lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
1565 if (texel_buffer_alignment_features) {
1566 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1567 }
1568
locke-lunargd556cc32019-09-17 01:21:23 -06001569 const auto *pipeline_exe_props_features =
1570 lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
1571 if (pipeline_exe_props_features) {
1572 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1573 }
1574
Jeff Bolz82f854d2019-09-17 14:56:47 -05001575 const auto *dedicated_allocation_image_aliasing_features =
1576 lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
1577 if (dedicated_allocation_image_aliasing_features) {
1578 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1579 *dedicated_allocation_image_aliasing_features;
1580 }
1581
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001582 const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
1583 if (performance_query_features) {
1584 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1585 }
1586
Tobias Hector782bcde2019-11-28 16:19:42 +00001587 const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
1588 if (device_coherent_memory_features) {
1589 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1590 }
1591
sfricke-samsungcead0802020-01-30 22:20:10 -08001592 const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
1593 if (ycbcr_image_array_features) {
1594 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1595 }
1596
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001597 const auto *ray_tracing_features = lvl_find_in_chain<VkPhysicalDeviceRayTracingFeaturesKHR>(pCreateInfo->pNext);
1598 if (ray_tracing_features) {
1599 state_tracker->enabled_features.ray_tracing_features = *ray_tracing_features;
1600 }
1601
Jeff Bolz165818a2020-05-08 11:19:03 -05001602 const auto *robustness2_features = lvl_find_in_chain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
1603 if (robustness2_features) {
1604 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1605 }
1606
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001607 const auto *fragment_density_map_features =
1608 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
1609 if (fragment_density_map_features) {
1610 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1611 }
1612
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001613 const auto *fragment_density_map_features2 =
1614 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
1615 if (fragment_density_map_features2) {
1616 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1617 }
1618
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001619 const auto *astc_decode_features = lvl_find_in_chain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
1620 if (astc_decode_features) {
1621 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1622 }
1623
Tony-LunarG7337b312020-04-15 16:40:25 -06001624 const auto *custom_border_color_features = lvl_find_in_chain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
1625 if (custom_border_color_features) {
1626 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1627 }
1628
sfricke-samsungfd661d62020-05-16 00:57:27 -07001629 const auto *pipeline_creation_cache_control_features =
1630 lvl_find_in_chain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
1631 if (pipeline_creation_cache_control_features) {
1632 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1633 }
1634
Piers Daniell39842ee2020-07-10 16:42:33 -06001635 const auto *extended_dynamic_state_features =
1636 lvl_find_in_chain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1637 if (extended_dynamic_state_features) {
1638 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1639 }
1640
locke-lunargd556cc32019-09-17 01:21:23 -06001641 // Store physical device properties and physical device mem limits into CoreChecks structs
1642 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1643 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001644 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1645 &state_tracker->phys_dev_props_core11);
1646 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1647 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001648
1649 const auto &dev_ext = state_tracker->device_extensions;
1650 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1651
1652 if (dev_ext.vk_khr_push_descriptor) {
1653 // Get the needed push_descriptor limits
1654 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1655 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1656 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1657 }
1658
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001659 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
1660 VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
1661 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1662 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1663 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1664 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1665 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1666 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1667 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1668 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1669 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1670 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1671 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1672 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1673 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1674 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1675 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1676 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1677 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1678 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1679 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1680 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1681 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1682 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1683 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1684 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1685 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1686 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1687 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1688 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1689 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1690 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1691 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1692 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1693 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1694 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1695 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1696 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1697 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1698 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1699 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1700 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
1701 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
1702 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
1703 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
1704 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
1705 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
1706 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
1707 }
1708
locke-lunargd556cc32019-09-17 01:21:23 -06001709 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1710 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1711 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1712 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001713
1714 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
1715 VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
1716 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
1717 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
1718 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
1719 depth_stencil_resolve_props.supportedStencilResolveModes;
1720 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
1721 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
1722 }
1723
locke-lunargd556cc32019-09-17 01:21:23 -06001724 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001725 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
1726 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing, &phys_dev_props->ray_tracing_propsKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06001727 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1728 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001729 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001730 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07001731 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06001732 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001733
1734 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
1735 VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
1736 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
1737 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
1738 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
1739 }
1740
1741 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
1742 VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
1743 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
1744 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
1745 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
1746 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
1747 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
1748 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
1749 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
1750 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
1751 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
1752 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
1753 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
1754 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
1755 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
1756 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
1757 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
1758 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
1759 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
1760 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
1761 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
1762 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
1763 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
1764 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001765
locke-lunargd556cc32019-09-17 01:21:23 -06001766 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1767 // Get the needed cooperative_matrix properties
1768 auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1769 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
1770 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1771 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1772
1773 uint32_t numCooperativeMatrixProperties = 0;
1774 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
1775 state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
1776 lvl_init_struct<VkCooperativeMatrixPropertiesNV>());
1777
1778 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
1779 state_tracker->cooperative_matrix_properties.data());
1780 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001781 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06001782 // Get the needed subgroup limits
1783 auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
1784 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
1785 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1786
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001787 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
1788 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
1789 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
1790 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06001791 }
1792
1793 // Store queue family data
1794 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1795 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001796 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06001797 state_tracker->queue_family_index_map.insert(
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001798 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.queueCount));
1799 state_tracker->queue_family_create_flags_map.insert(
1800 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.flags));
locke-lunargd556cc32019-09-17 01:21:23 -06001801 }
1802 }
1803}
1804
1805void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
1806 if (!device) return;
1807
locke-lunargd556cc32019-09-17 01:21:23 -06001808 // Reset all command buffers before destroying them, to unlink object_bindings.
1809 for (auto &commandBuffer : commandBufferMap) {
1810 ResetCommandBufferState(commandBuffer.first);
1811 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001812 pipelineMap.clear();
1813 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001814 commandBufferMap.clear();
1815
1816 // This will also delete all sets in the pool & remove them from setMap
1817 DeleteDescriptorSetPools();
1818 // All sets should be removed
1819 assert(setMap.empty());
1820 descriptorSetLayoutMap.clear();
1821 imageViewMap.clear();
1822 imageMap.clear();
1823 bufferViewMap.clear();
1824 bufferMap.clear();
1825 // Queues persist until device is destroyed
1826 queueMap.clear();
1827}
1828
1829// Loop through bound objects and increment their in_use counts.
1830void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
1831 for (auto obj : cb_node->object_bindings) {
1832 auto base_obj = GetStateStructPtrFromObject(obj);
1833 if (base_obj) {
1834 base_obj->in_use.fetch_add(1);
1835 }
1836 }
1837}
1838
1839// Track which resources are in-flight by atomically incrementing their "in_use" count
1840void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1841 cb_node->submitCount++;
1842 cb_node->in_use.fetch_add(1);
1843
1844 // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
1845 IncrementBoundObjects(cb_node);
1846 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
1847 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
1848 // should then be flagged prior to calling this function
1849 for (auto event : cb_node->writeEventsBeforeWait) {
1850 auto event_state = GetEventState(event);
1851 if (event_state) event_state->write_in_use++;
1852 }
1853}
1854
1855// Decrement in-use count for objects bound to command buffer
1856void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
1857 BASE_NODE *base_obj = nullptr;
1858 for (auto obj : cb_node->object_bindings) {
1859 base_obj = GetStateStructPtrFromObject(obj);
1860 if (base_obj) {
1861 base_obj->in_use.fetch_sub(1);
1862 }
1863 }
1864}
1865
// Retire (complete) all work on pQueue up to sequence number seq.
// Rolls the queue's submission list forward in order, releasing the in_use references
// taken at submit time, raising timeline-semaphore payloads, replaying recorded query
// updates, and retiring fences. Cross-queue semaphore waits are collected and retired
// recursively at the end.
void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    // Highest sequence number we saw waited on, per signaling queue.
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        // Release each wait's semaphore reference and remember how far the queue that
        // signaled it must have progressed.
        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        // Release signal references; a completed timeline signal advances the payload
        // (monotonically -- never lowered).
        for (auto &signal : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(signal.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
                if (pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && pSemaphore->payload < signal.payload) {
                    pSemaphore->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            // Replay the command buffer's recorded query updates with validation
            // disabled to compute the final per-query states for this submission.
            QueryMap localQueryToStateMap;
            VkQueryPool first_pool = VK_NULL_HANDLE;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &localQueryToStateMap);
            }

            // Queries that ended during this now-retired submission become available.
            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        // An internally-scoped fence attached to this submission is now signaled.
        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
}
1940
1941// Submit a fence to a queue, delimiting previous fences and previous untracked
1942// work by it.
1943static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1944 pFence->state = FENCE_INFLIGHT;
1945 pFence->signaler.first = pQueue->queue;
1946 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1947}
1948
// Record the state effects of a successful vkQueueSubmit: take in_use references on
// command buffers and semaphores, append SUBMISSION bookkeeping entries to the queue,
// and replay each command buffer's recorded query/event update lambdas to produce the
// final query and event state.
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        // Sequence number this submission will occupy on the queue.
        const uint64_t next_seq = pQueue->seq + pQueue->submissions.size() + 1;
        // NOTE(review): timeline_semaphore_submit is dereferenced below without a null
        // check -- this relies on valid usage requiring VkTimelineSemaphoreSubmitInfoKHR
        // in pNext whenever timeline semaphores are waited on or signaled; confirm.
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_WAIT wait;
                    wait.semaphore = semaphore;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        // A binary wait consumes the pending signal; record which
                        // queue/seq produced it so retirement can roll that queue forward.
                        if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                            wait.queue = pSemaphore->signaler.first;
                            wait.seq = pSemaphore->signaler.second;
                            semaphore_waits.push_back(wait);
                            pSemaphore->in_use.fetch_add(1);
                        }
                        pSemaphore->signaler.first = VK_NULL_HANDLE;
                        pSemaphore->signaled = false;
                    } else if (pSemaphore->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
                        // Timeline wait only blocks when the payload has not yet
                        // reached the waited-for value.
                        wait.queue = queue;
                        wait.seq = next_seq;
                        wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
                        semaphore_waits.push_back(wait);
                        pSemaphore->in_use.fetch_add(1);
                    }
                } else {
                    // Externally-scoped semaphore: track by handle; a temporary import
                    // reverts to internal scope after this wait.
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = next_seq;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        // Binary signal: this queue becomes the semaphore's signaler.
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = next_seq;
                        pSemaphore->signaled = true;
                    } else {
                        // Timeline signal: payload applied when the submission retires.
                        signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, next_seq);
                }
            }
        }
        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                // Include executed secondaries so their resources are counted in-flight too.
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                // Replay recorded query and event updates (validation disabled) and fold
                // the results into the device-level maps.
                VkQueryPool first_pool = VK_NULL_HANDLE;
                EventToStageMap localEventToStageMap;
                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, first_pool, perf_pass, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        // Only the last submission in the batch carries the fence.
        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, perf_pass);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
2079
2080void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
2081 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
2082 VkResult result) {
2083 if (VK_SUCCESS == result) {
2084 AddMemObjInfo(device, *pMemory, pAllocateInfo);
2085 }
2086 return;
2087}
2088
// Tear down tracked state for a device-memory object that is about to be freed:
// unbind it from any image/buffer/acceleration-structure that referenced it,
// invalidate command buffers that used it, and drop it from memObjMap.
void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            // The successor iterator is saved before a possible erase so iteration can
            // continue safely after the element is removed.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    // Return this allocation's synthetic base address to the fake address-space allocator.
    fake_memory.Free(mem_info->fake_base_address);
    memObjMap.erase(mem);
}
2136
// Record the state effects of a successful vkQueueBindSparse: update sparse memory
// bindings for buffers and images, process wait/signal semaphores, and append
// SUBMISSION bookkeeping entries to the queue (binary semaphores only on this path).
void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    // Waiting consumes the pending signal; record the signaling
                    // queue/seq so retirement can roll that queue forward.
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    // Externally-scoped semaphore: track by handle; a temporary import
                    // reverts to internal scope after this wait.
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    // This queue becomes the semaphore's signaler at the sequence
                    // number the submission below will occupy.
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);

                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = pSemaphore->signaler.second;
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        // Sparse binds carry no command buffers; the fence rides on the last entry only.
        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
2237
2238void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
2239 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
2240 VkResult result) {
2241 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002242 auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002243 semaphore_state->signaler.first = VK_NULL_HANDLE;
2244 semaphore_state->signaler.second = 0;
2245 semaphore_state->signaled = false;
2246 semaphore_state->scope = kSyncScopeInternal;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002247 semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
2248 semaphore_state->payload = 0;
2249 auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
2250 if (semaphore_type_create_info) {
2251 semaphore_state->type = semaphore_type_create_info->semaphoreType;
2252 semaphore_state->payload = semaphore_type_create_info->initialValue;
2253 }
locke-lunargd556cc32019-09-17 01:21:23 -06002254 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
2255}
2256
2257void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
2258 VkSemaphoreImportFlagsKHR flags) {
2259 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
2260 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
2261 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
2262 sema_node->scope == kSyncScopeInternal) {
2263 sema_node->scope = kSyncScopeExternalTemporary;
2264 } else {
2265 sema_node->scope = kSyncScopeExternalPermanent;
2266 }
2267 }
2268}
2269
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002270void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
2271 VkResult result) {
2272 auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
2273 pSemaphore->payload = pSignalInfo->value;
2274}
2275
locke-lunargd556cc32019-09-17 01:21:23 -06002276void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
2277 auto mem_info = GetDevMemState(mem);
2278 if (mem_info) {
2279 mem_info->mapped_range.offset = offset;
2280 mem_info->mapped_range.size = size;
2281 mem_info->p_driver_data = *ppData;
2282 }
2283}
2284
2285void ValidationStateTracker::RetireFence(VkFence fence) {
2286 auto pFence = GetFenceState(fence);
2287 if (pFence && pFence->scope == kSyncScopeInternal) {
2288 if (pFence->signaler.first != VK_NULL_HANDLE) {
2289 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002290 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002291 } else {
2292 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
2293 // the fence as retired.
2294 pFence->state = FENCE_RETIRED;
2295 }
2296 }
2297}
2298
2299void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2300 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2301 if (VK_SUCCESS != result) return;
2302
2303 // When we know that all fences are complete we can clean/remove their CBs
2304 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2305 for (uint32_t i = 0; i < fenceCount; i++) {
2306 RetireFence(pFences[i]);
2307 }
2308 }
2309 // NOTE : Alternate case not handled here is when some fences have completed. In
2310 // this case for app to guarantee which fences completed it will have to call
2311 // vkGetFenceStatus() at which point we'll clean/remove their CBs if complete.
2312}
2313
// Retire queue work whose completion is proven by a timeline semaphore having reached
// |until_payload|. For every queue, find the highest-sequence submission that signals this
// semaphore with a payload <= until_payload; all work on that queue up to and including that
// sequence number must have finished, so it can be retired.
void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore) {
        for (auto &pair : queueMap) {
            QUEUE_STATE &queueState = pair.second;
            uint64_t max_seq = 0;
            // Scan every pending submission's signal list for this semaphore.
            for (const auto &submission : queueState.submissions) {
                for (const auto &signalSemaphore : submission.signalSemaphores) {
                    if (signalSemaphore.semaphore == semaphore && signalSemaphore.payload <= until_payload) {
                        if (signalSemaphore.seq > max_seq) {
                            max_seq = signalSemaphore.seq;
                        }
                    }
                }
            }
            // max_seq == 0 means no submission on this queue is proven complete; skip it.
            if (max_seq) {
                RetireWorkOnQueue(&queueState, max_seq);
            }
        }
    }
}
2335
John Zulauff89de662020-04-13 18:57:34 -06002336void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2337 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002338 if (VK_SUCCESS != result) return;
2339
2340 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2341 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2342 }
2343}
2344
// Post-record hook for the core (Vulkan 1.2) vkWaitSemaphores entry point; defers to the shared recorder.
void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                          VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}
2349
// Post-record hook for the VK_KHR_timeline_semaphore vkWaitSemaphoresKHR alias; defers to the shared recorder.
void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
                                                             uint64_t timeout, VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}
2354
Adrian Coca Lorentec7d76102020-09-28 13:58:16 +02002355void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2356 VkResult result) {
2357 if (VK_SUCCESS != result) return;
2358
2359 RetireTimelineSemaphore(semaphore, *pValue);
2360}
2361
// Post-record hook for the core (Vulkan 1.2) vkGetSemaphoreCounterValue; defers to the shared recorder.
void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                                    VkResult result) {
    RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
}
// Post-record hook for the VK_KHR_timeline_semaphore alias; defers to the shared recorder.
void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                                       VkResult result) {
    RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
}
2370
locke-lunargd556cc32019-09-17 01:21:23 -06002371void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2372 if (VK_SUCCESS != result) return;
2373 RetireFence(fence);
2374}
2375
2376void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
2377 // Add queue to tracking set only if it is new
2378 auto queue_is_new = queues.emplace(queue);
2379 if (queue_is_new.second == true) {
2380 QUEUE_STATE *queue_state = &queueMap[queue];
2381 queue_state->queue = queue;
2382 queue_state->queueFamilyIndex = queue_family_index;
2383 queue_state->seq = 0;
2384 }
2385}
2386
// Post-record hook for vkGetDeviceQueue; registers the returned queue handle for tracking.
void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}
2391
// Post-record hook for vkGetDeviceQueue2; registers the returned queue handle for tracking.
void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}
2395
2396void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2397 if (VK_SUCCESS != result) return;
2398 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002399 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002400}
2401
2402void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2403 if (VK_SUCCESS != result) return;
2404 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002405 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002406 }
2407}
2408
2409void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2410 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002411 auto fence_state = GetFenceState(fence);
2412 fence_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002413 fenceMap.erase(fence);
2414}
2415
2416void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2417 const VkAllocationCallbacks *pAllocator) {
2418 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002419 auto semaphore_state = GetSemaphoreState(semaphore);
2420 semaphore_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002421 semaphoreMap.erase(semaphore);
2422}
2423
2424void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2425 if (!event) return;
2426 EVENT_STATE *event_state = GetEventState(event);
2427 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
2428 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
2429 eventMap.erase(event);
2430}
2431
2432void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2433 const VkAllocationCallbacks *pAllocator) {
2434 if (!queryPool) return;
2435 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
2436 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
2437 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002438 qp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002439 queryPoolMap.erase(queryPool);
2440}
2441
2442// Object with given handle is being bound to memory w/ given mem_info struct.
2443// Track the newly bound memory range with given memoryOffset
2444// Also scan any previous ranges, track aliased ranges with new range, and flag an error if a linear
2445// and non-linear range incorrectly overlap.
locke-lunargd556cc32019-09-17 01:21:23 -06002446void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002447 VkDeviceSize memoryOffset) {
locke-lunargd556cc32019-09-17 01:21:23 -06002448 if (typed_handle.type == kVulkanObjectTypeImage) {
2449 mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
2450 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002451 mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002452 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002453 mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002454 } else {
2455 // Unsupported object type
2456 assert(false);
2457 }
2458}
2459
// Convenience wrapper: record that |image| is bound into |mem_info| at |mem_offset|.
void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
}
2463
// Convenience wrapper: record that |buffer| is bound into |mem_info| at |mem_offset|.
void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
}
2467
// Convenience wrapper: record that acceleration structure |as| is bound into |mem_info| at |mem_offset|.
void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
                                                                    VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
}
2472
2473// This function will remove the handle-to-index mapping from the appropriate map.
2474static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
2475 if (typed_handle.type == kVulkanObjectTypeImage) {
2476 mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
2477 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002478 mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002479 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002480 mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002481 } else {
2482 // Unsupported object type
2483 assert(false);
2484 }
2485}
2486
// Convenience wrapper: remove |buffer|'s binding record from |mem_info|.
void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
}
2490
// Convenience wrapper: remove |image|'s binding record from |mem_info|.
void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
}
2494
// Convenience wrapper: remove acceleration structure |as|'s binding record from |mem_info|.
void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
}
2498
2499void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2500 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2501 if (buffer_state) {
2502 // Track bound memory range information
2503 auto mem_info = GetDevMemState(mem);
2504 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002505 InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06002506 }
2507 // Track objects tied to memory
2508 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
2509 }
2510}
2511
// Post-record hook for vkBindBufferMemory; records the binding only on success.
void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}
2517
2518void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
2519 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2520 for (uint32_t i = 0; i < bindInfoCount; i++) {
2521 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2522 }
2523}
2524
2525void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
2526 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2527 for (uint32_t i = 0; i < bindInfoCount; i++) {
2528 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2529 }
2530}
2531
Spencer Fricke6c127102020-04-16 06:25:20 -07002532void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002533 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2534 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002535 buffer_state->memory_requirements_checked = true;
2536 }
2537}
2538
// Post-record hook for vkGetBufferMemoryRequirements.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer);
}
2543
// Post-record hook for vkGetBufferMemoryRequirements2.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                        VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}
2549
// Post-record hook for the VK_KHR_get_memory_requirements2 alias.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                           VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}
2555
// Note that the app has queried memory requirements for this image.
// pInfo is null for the vkGetImageMemoryRequirements (non-2) path; its pNext chain may carry a
// VkImagePlaneMemoryRequirementsInfo for multi-planar (disjoint) images, in which case each
// plane is tracked individually rather than the image as a whole.
void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
    const VkImagePlaneMemoryRequirementsInfo *plane_info =
        (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
    IMAGE_STATE *image_state = GetImageState(image);
    if (image_state) {
        if (plane_info != nullptr) {
            // Multi-plane image
            image_state->memory_requirements_checked = false;  // Each image plane needs to be checked itself
            if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
                image_state->plane0_memory_requirements_checked = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
                image_state->plane1_memory_requirements_checked = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
                image_state->plane2_memory_requirements_checked = true;
            }
        } else {
            // Single Plane image
            image_state->memory_requirements_checked = true;
        }
    }
}
2577
// Post-record hook for vkGetImageMemoryRequirements (no pNext chain, hence nullptr info).
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(image, nullptr);
}
2582
// Post-record hook for vkGetImageMemoryRequirements2 (pInfo may carry plane info in its pNext chain).
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}
2587
// Post-record hook for the VK_KHR_get_memory_requirements2 alias.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}
2593
2594static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2595 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2596 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2597 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2598 image_state->sparse_metadata_required = true;
2599 }
2600}
2601
2602void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2603 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2604 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2605 auto image_state = GetImageState(image);
2606 image_state->get_sparse_reqs_called = true;
2607 if (!pSparseMemoryRequirements) return;
2608 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2609 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2610 }
2611}
2612
2613void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
2614 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2615 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2616 auto image_state = GetImageState(pInfo->image);
2617 image_state->get_sparse_reqs_called = true;
2618 if (!pSparseMemoryRequirements) return;
2619 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2620 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2621 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2622 }
2623}
2624
2625void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
2626 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2627 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2628 auto image_state = GetImageState(pInfo->image);
2629 image_state->get_sparse_reqs_called = true;
2630 if (!pSparseMemoryRequirements) return;
2631 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2632 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2633 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2634 }
2635}
2636
2637void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2638 const VkAllocationCallbacks *pAllocator) {
2639 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002640 auto shader_module_state = GetShaderModuleState(shaderModule);
2641 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002642 shaderModuleMap.erase(shaderModule);
2643}
2644
2645void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2646 const VkAllocationCallbacks *pAllocator) {
2647 if (!pipeline) return;
2648 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
2649 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
2650 // Any bound cmd buffers are now invalid
2651 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002652 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002653 pipelineMap.erase(pipeline);
2654}
2655
2656void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2657 const VkAllocationCallbacks *pAllocator) {
2658 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002659 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
2660 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002661 pipelineLayoutMap.erase(pipelineLayout);
2662}
2663
// Drop tracking for a sampler being destroyed: invalidate command buffers that bound it and
// keep the custom-border-color sampler count (used by EXT limit validation) in sync.
void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
                                                         const VkAllocationCallbacks *pAllocator) {
    if (!sampler) return;
    SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
    const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
    // Any bound cmd buffers are now invalid
    if (sampler_state) {
        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);

        // Samplers with a custom border color count against a device limit; release this one's slot.
        if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
            sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
            custom_border_color_sampler_count--;
        }

        sampler_state->destroyed = true;
    }
    samplerMap.erase(sampler);
}
2682
// Drop tracking for a descriptor set layout being destroyed. The map holds a shared_ptr, so
// the destroyed flag is set on the shared object before the map entry is erased, letting any
// remaining holders observe that the handle is gone.
void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
                                                                     const VkAllocationCallbacks *pAllocator) {
    if (!descriptorSetLayout) return;
    auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
    if (layout_it != descriptorSetLayoutMap.end()) {
        layout_it->second.get()->destroyed = true;
        descriptorSetLayoutMap.erase(layout_it);
    }
}
2692
// Drop tracking for a descriptor pool being destroyed. Destroying a pool implicitly frees
// every descriptor set allocated from it, so those set states are released here too.
void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!descriptorPool) return;
    DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
    const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
    if (desc_pool_state) {
        // Any bound cmd buffers are now invalid
        InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
        // Free sets that were in this pool
        for (auto ds : desc_pool_state->sets) {
            FreeDescriptorSet(ds);
        }
        desc_pool_state->destroyed = true;
        descriptorPoolMap.erase(descriptorPool);
    }
}
2709
// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
// |pool_state| must be the pool the command buffers were allocated from; each buffer's state
// is reset, unlinked from the pool and debug-label tracking, then erased from the CB map.
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        // Allow any derived class to clean up command buffer state
        if (command_buffer_free_callback) {
            (*command_buffer_free_callback)(command_buffers[i]);
        }

        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}
2735
2736void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2737 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
2738 auto pPool = GetCommandPoolState(commandPool);
2739 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
2740}
2741
// Create tracking state for a newly created command pool on success.
void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    VkCommandPool command_pool = *pCommandPool;
    auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
    cmd_pool_state->commandPool = command_pool;
    cmd_pool_state->createFlags = pCreateInfo->flags;
    cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
    // Pools without the PROTECTED bit allocate only unprotected command buffers.
    cmd_pool_state->unprotected = ((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) == 0);
    commandPoolMap[command_pool] = std::move(cmd_pool_state);
}
2754
// Create tracking state for a newly created query pool on success, including performance-query
// metadata for VK_KHR_performance_query pools, and seed every query slot as UNKNOWN.
void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
    query_pool_state->createInfo = *pCreateInfo;
    query_pool_state->pool = *pQueryPool;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        // Valid usage requires a VkQueryPoolPerformanceCreateInfoKHR in the pNext chain here.
        const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];

        // Record whether any selected counter is scoped to a command buffer or render pass;
        // validation elsewhere restricts where such pools may be begun/ended.
        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
            const auto &counter = counters.counters[perf->pCounterIndices[i]];
            switch (counter.scope) {
                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
                    query_pool_state->has_perf_scope_command_buffer = true;
                    break;
                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
                    query_pool_state->has_perf_scope_render_pass = true;
                    break;
                default:
                    break;
            }
        }

        // Ask the driver how many submission passes this counter selection requires.
        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
                                                                      &query_pool_state->n_performance_passes);
    }

    queryPoolMap[*pQueryPool] = std::move(query_pool_state);

    // Every query slot starts in the UNKNOWN state until reset/begun.
    QueryObject query_obj{*pQueryPool, 0u};
    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
        query_obj.query = i;
        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
    }
}
2792
// Drop tracking for a command pool being destroyed, freeing the state of every command buffer
// allocated from it first.
void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!commandPool) return;
    COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
    // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
    // "When a pool is destroyed, all command buffers allocated from the pool are freed."
    if (cp_state) {
        // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
        std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
        FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
        cp_state->destroyed = true;
        commandPoolMap.erase(commandPool);
    }
}
2807
2808void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2809 VkCommandPoolResetFlags flags, VkResult result) {
2810 if (VK_SUCCESS != result) return;
2811 // Reset all of the CBs allocated from this pool
2812 auto command_pool_state = GetCommandPoolState(commandPool);
2813 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2814 ResetCommandBufferState(cmdBuffer);
2815 }
2816}
2817
2818void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2819 VkResult result) {
2820 for (uint32_t i = 0; i < fenceCount; ++i) {
2821 auto pFence = GetFenceState(pFences[i]);
2822 if (pFence) {
2823 if (pFence->scope == kSyncScopeInternal) {
2824 pFence->state = FENCE_UNSIGNALED;
2825 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2826 pFence->scope = kSyncScopeInternal;
2827 }
2828 }
2829 }
2830}
2831
// For given cb_nodes, invalidate them and track object causing invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
// can also unlink objects from command buffers.
void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        // A recording CB becomes INVALID_INCOMPLETE; a finished one becomes INVALID_COMPLETE.
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        // Remember which object broke the CB so error messages can name it.
        cb_node->broken_bindings.push_back(obj);

        // if secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            // The mapped int is the object's index in the CB's object_bindings; clear that slot.
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        cb_nodes.clear();
    }
}
2861
2862void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
2863 const VulkanTypedHandle &obj) {
locke-lunargd556cc32019-09-17 01:21:23 -06002864 for (auto cb_node : cb_nodes) {
2865 if (cb_node->state == CB_RECORDING) {
2866 cb_node->state = CB_INVALID_INCOMPLETE;
2867 } else if (cb_node->state == CB_RECORDED) {
2868 cb_node->state = CB_INVALID_COMPLETE;
2869 }
2870 cb_node->broken_bindings.push_back(obj);
2871
2872 // if secondary, then propagate the invalidation to the primaries that will call us.
2873 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05002874 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06002875 }
2876 }
2877}
2878
2879void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2880 const VkAllocationCallbacks *pAllocator) {
2881 if (!framebuffer) return;
2882 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2883 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2884 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002885 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002886 frameBufferMap.erase(framebuffer);
2887}
2888
2889void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2890 const VkAllocationCallbacks *pAllocator) {
2891 if (!renderPass) return;
2892 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2893 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2894 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002895 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002896 renderPassMap.erase(renderPass);
2897}
2898
2899void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2900 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2901 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002902 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002903 fence_state->fence = *pFence;
2904 fence_state->createInfo = *pCreateInfo;
2905 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2906 fenceMap[*pFence] = std::move(fence_state);
2907}
2908
2909bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2910 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2911 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002912 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002913 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2914 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2915 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2916 cgpl_state->pipe_state.reserve(count);
2917 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002918 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002919 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002920 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002921 }
2922 return false;
2923}
2924
2925void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2926 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2927 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2928 VkResult result, void *cgpl_state_data) {
2929 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2930 // This API may create pipelines regardless of the return value
2931 for (uint32_t i = 0; i < count; i++) {
2932 if (pPipelines[i] != VK_NULL_HANDLE) {
2933 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2934 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2935 }
2936 }
2937 cgpl_state->pipe_state.clear();
2938}
2939
2940bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2941 const VkComputePipelineCreateInfo *pCreateInfos,
2942 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002943 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002944 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2945 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2946 ccpl_state->pipe_state.reserve(count);
2947 for (uint32_t i = 0; i < count; i++) {
2948 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002949 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002950 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002951 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002952 }
2953 return false;
2954}
2955
2956void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2957 const VkComputePipelineCreateInfo *pCreateInfos,
2958 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2959 VkResult result, void *ccpl_state_data) {
2960 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2961
2962 // This API may create pipelines regardless of the return value
2963 for (uint32_t i = 0; i < count; i++) {
2964 if (pPipelines[i] != VK_NULL_HANDLE) {
2965 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2966 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2967 }
2968 }
2969 ccpl_state->pipe_state.clear();
2970}
2971
2972bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2973 uint32_t count,
2974 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2975 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002976 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002977 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2978 crtpl_state->pipe_state.reserve(count);
2979 for (uint32_t i = 0; i < count; i++) {
2980 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002981 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002982 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002983 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002984 }
2985 return false;
2986}
2987
2988void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2989 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2990 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2991 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2992 // This API may create pipelines regardless of the return value
2993 for (uint32_t i = 0; i < count; i++) {
2994 if (pPipelines[i] != VK_NULL_HANDLE) {
2995 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2996 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2997 }
2998 }
2999 crtpl_state->pipe_state.clear();
3000}
3001
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003002bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache,
3003 uint32_t count,
3004 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3005 const VkAllocationCallbacks *pAllocator,
3006 VkPipeline *pPipelines, void *crtpl_state_data) const {
3007 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3008 crtpl_state->pipe_state.reserve(count);
3009 for (uint32_t i = 0; i < count; i++) {
3010 // Create and initialize internal tracking data structure
3011 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
3012 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
3013 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
3014 }
3015 return false;
3016}
3017
3018void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
3019 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3020 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
3021 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3022 // This API may create pipelines regardless of the return value
3023 for (uint32_t i = 0; i < count; i++) {
3024 if (pPipelines[i] != VK_NULL_HANDLE) {
3025 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3026 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3027 }
3028 }
3029 crtpl_state->pipe_state.clear();
3030}
3031
locke-lunargd556cc32019-09-17 01:21:23 -06003032void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
3033 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
3034 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003035 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Tony-LunarG7337b312020-04-15 16:40:25 -06003036 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT || pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT)
3037 custom_border_color_sampler_count++;
locke-lunargd556cc32019-09-17 01:21:23 -06003038}
3039
3040void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
3041 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
3042 const VkAllocationCallbacks *pAllocator,
3043 VkDescriptorSetLayout *pSetLayout, VkResult result) {
3044 if (VK_SUCCESS != result) return;
3045 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
3046}
3047
3048// For repeatable sorting, not very useful for "memory in range" search
3049struct PushConstantRangeCompare {
3050 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
3051 if (lhs->offset == rhs->offset) {
3052 if (lhs->size == rhs->size) {
3053 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
3054 return lhs->stageFlags < rhs->stageFlags;
3055 }
3056 // If the offsets are the same then sorting by the end of range is useful for validation
3057 return lhs->size < rhs->size;
3058 }
3059 return lhs->offset < rhs->offset;
3060 }
3061};
3062
3063static PushConstantRangesDict push_constant_ranges_dict;
3064
3065PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
3066 if (!info->pPushConstantRanges) {
3067 // Hand back the empty entry (creating as needed)...
3068 return push_constant_ranges_dict.look_up(PushConstantRanges());
3069 }
3070
3071 // Sort the input ranges to ensure equivalent ranges map to the same id
3072 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
3073 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
3074 sorted.insert(info->pPushConstantRanges + i);
3075 }
3076
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07003077 PushConstantRanges ranges;
3078 ranges.reserve(sorted.size());
locke-lunargd556cc32019-09-17 01:21:23 -06003079 for (const auto range : sorted) {
3080 ranges.emplace_back(*range);
3081 }
3082 return push_constant_ranges_dict.look_up(std::move(ranges));
3083}
3084
// Dictionary of canoncial form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;

// Look up (creating on demand) the canonical "compatible for set N" record for the given
// set index, push-constant-range id, and set-layout list id.
static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}
3095
3096void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
3097 const VkAllocationCallbacks *pAllocator,
3098 VkPipelineLayout *pPipelineLayout, VkResult result) {
3099 if (VK_SUCCESS != result) return;
3100
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003101 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003102 pipeline_layout_state->layout = *pPipelineLayout;
3103 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
3104 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
3105 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05003106 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003107 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
3108 }
3109
3110 // Get canonical form IDs for the "compatible for set" contents
3111 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
3112 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
3113 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
3114
3115 // Create table of "compatible for set N" cannonical forms for trivial accept validation
3116 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
3117 pipeline_layout_state->compat_for_set.emplace_back(
3118 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
3119 }
3120 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
3121}
3122
3123void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
3124 const VkAllocationCallbacks *pAllocator,
3125 VkDescriptorPool *pDescriptorPool, VkResult result) {
3126 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003127 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003128}
3129
3130void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3131 VkDescriptorPoolResetFlags flags, VkResult result) {
3132 if (VK_SUCCESS != result) return;
3133 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
3134 // TODO: validate flags
3135 // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
3136 for (auto ds : pPool->sets) {
3137 FreeDescriptorSet(ds);
3138 }
3139 pPool->sets.clear();
3140 // Reset available count for each type and available sets for this pool
3141 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
3142 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
3143 }
3144 pPool->availableSets = pPool->maxSets;
3145}
3146
3147bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
3148 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003149 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003150 // Always update common data
3151 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3152 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3153 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
3154
3155 return false;
3156}
3157
3158// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
3159void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3160 VkDescriptorSet *pDescriptorSets, VkResult result,
3161 void *ads_state_data) {
3162 if (VK_SUCCESS != result) return;
3163 // All the updates are contained in a single cvdescriptorset function
3164 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3165 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3166 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
3167}
3168
3169void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
3170 const VkDescriptorSet *pDescriptorSets) {
3171 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
3172 // Update available descriptor sets in pool
3173 pool_state->availableSets += count;
3174
3175 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
3176 for (uint32_t i = 0; i < count; ++i) {
3177 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
3178 auto descriptor_set = setMap[pDescriptorSets[i]].get();
3179 uint32_t type_index = 0, descriptor_count = 0;
3180 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
3181 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
3182 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
3183 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
3184 }
3185 FreeDescriptorSet(descriptor_set);
3186 pool_state->sets.erase(descriptor_set);
3187 }
3188 }
3189}
3190
// Record state updates for vkUpdateDescriptorSets. All of the per-set write/copy
// processing is delegated to a single cvdescriptorset helper.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                              const VkWriteDescriptorSet *pDescriptorWrites,
                                                              uint32_t descriptorCopyCount,
                                                              const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}
3198
3199void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
3200 VkCommandBuffer *pCommandBuffer, VkResult result) {
3201 if (VK_SUCCESS != result) return;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003202 auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
locke-lunargd556cc32019-09-17 01:21:23 -06003203 if (pPool) {
3204 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
3205 // Add command buffer to its commandPool map
3206 pPool->commandBuffers.insert(pCommandBuffer[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003207 auto pCB = std::make_shared<CMD_BUFFER_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003208 pCB->createInfo = *pCreateInfo;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003209 pCB->command_pool = pPool;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07003210 pCB->unprotected = pPool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06003211 // Add command buffer to map
3212 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
3213 ResetCommandBufferState(pCommandBuffer[i]);
3214 }
3215 }
3216}
3217
3218// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
3219void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003220 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003221 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07003222 // If imageless fb, skip fb binding
3223 if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003224 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
3225 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003226 auto view_state = GetAttachmentImageViewState(cb_state, fb_state, attachment);
locke-lunargd556cc32019-09-17 01:21:23 -06003227 if (view_state) {
3228 AddCommandBufferBindingImageView(cb_state, view_state);
3229 }
3230 }
3231}
3232
// Record state for vkBeginCommandBuffer: bind inherited framebuffer resources for
// secondaries, perform the implicit reset when re-beginning a recorded/invalid buffer,
// snapshot the begin info (re-aiming its pInheritanceInfo pointer at our own copy), and
// capture inherited render pass / subpass / framebuffer plus the initial device mask.
void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary Command Buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (pInfo) {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, framebuffer);
                }
            }
        }
    }
    // Beginning an already-recorded (or completely invalidated) buffer is an implicit reset.
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo) {
        // Deep-copy the inheritance info and point our stored beginInfo at the copy, so the
        // snapshot stays valid after the caller's struct goes away.
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command-buffer and inheriting. Update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
            if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
                cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
                if (cb_state->activeFramebuffer) cb_state->framebuffers.insert(cb_state->activeFramebuffer);
            }
        }
    }

    // Initial device mask: from the chained VkDeviceGroupCommandBufferBeginInfo if present,
    // otherwise all physical devices in the group.
    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    // Remember whether the performance lock was held when recording began (checked at submit).
    cb_state->performance_lock_acquired = performance_lock_acquired;
}
3281
3282void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
3283 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3284 if (!cb_state) return;
3285 // Cached validation is specific to a specific recording of a specific command buffer.
3286 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
3287 descriptor_set->ClearCachedValidation(cb_state);
3288 }
3289 cb_state->validated_descriptor_sets.clear();
3290 if (VK_SUCCESS == result) {
3291 cb_state->state = CB_RECORDED;
3292 }
3293}
3294
3295void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
3296 VkResult result) {
3297 if (VK_SUCCESS == result) {
3298 ResetCommandBufferState(commandBuffer);
3299 }
3300}
3301
3302CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
3303 // initially assume everything is static state
3304 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
3305
3306 if (ds) {
3307 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
3308 switch (ds->pDynamicStates[i]) {
3309 case VK_DYNAMIC_STATE_LINE_WIDTH:
3310 flags &= ~CBSTATUS_LINE_WIDTH_SET;
3311 break;
3312 case VK_DYNAMIC_STATE_DEPTH_BIAS:
3313 flags &= ~CBSTATUS_DEPTH_BIAS_SET;
3314 break;
3315 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
3316 flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
3317 break;
3318 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
3319 flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
3320 break;
3321 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
3322 flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
3323 break;
3324 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
3325 flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
3326 break;
3327 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
3328 flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
3329 break;
3330 case VK_DYNAMIC_STATE_SCISSOR:
3331 flags &= ~CBSTATUS_SCISSOR_SET;
3332 break;
3333 case VK_DYNAMIC_STATE_VIEWPORT:
3334 flags &= ~CBSTATUS_VIEWPORT_SET;
3335 break;
3336 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
3337 flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
3338 break;
3339 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
3340 flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
3341 break;
3342 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
3343 flags &= ~CBSTATUS_LINE_STIPPLE_SET;
3344 break;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003345 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
3346 flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
3347 break;
Piers Daniell39842ee2020-07-10 16:42:33 -06003348 case VK_DYNAMIC_STATE_CULL_MODE_EXT:
3349 flags &= ~CBSTATUS_CULL_MODE_SET;
3350 break;
3351 case VK_DYNAMIC_STATE_FRONT_FACE_EXT:
3352 flags &= ~CBSTATUS_FRONT_FACE_SET;
3353 break;
3354 case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT:
3355 flags &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
3356 break;
3357 case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
3358 flags &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
3359 break;
3360 case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT:
3361 flags &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
3362 break;
3363 case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT:
3364 flags &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
3365 break;
3366 case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT:
3367 flags &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
3368 break;
3369 case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT:
3370 flags &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
3371 break;
3372 case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT:
3373 flags &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
3374 break;
3375 case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT:
3376 flags &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
3377 break;
3378 case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT:
3379 flags &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
3380 break;
3381 case VK_DYNAMIC_STATE_STENCIL_OP_EXT:
3382 flags &= ~CBSTATUS_STENCIL_OP_SET;
3383 break;
locke-lunargd556cc32019-09-17 01:21:23 -06003384 default:
3385 break;
3386 }
3387 }
3388 }
3389
3390 return flags;
3391}
3392
3393// Validation cache:
3394// CV is the bottommost implementor of this extension. Don't pass calls down.
3395// utility function to set collective state for pipeline
3396void SetPipelineState(PIPELINE_STATE *pPipe) {
3397 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
3398 if (pPipe->graphicsPipelineCI.pColorBlendState) {
3399 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
3400 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
3401 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3402 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3403 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3404 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3405 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3406 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3407 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3408 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
3409 pPipe->blendConstantsEnabled = true;
3410 }
3411 }
3412 }
3413 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07003414 // Check if sample location is enabled
3415 if (pPipe->graphicsPipelineCI.pMultisampleState) {
3416 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
3417 lvl_find_in_chain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
3418 if (sample_location_state != nullptr) {
3419 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
3420 }
3421 }
locke-lunargd556cc32019-09-17 01:21:23 -06003422}
3423
3424void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
3425 VkPipeline pipeline) {
3426 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3427 assert(cb_state);
3428
3429 auto pipe_state = GetPipelineState(pipeline);
3430 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
3431 cb_state->status &= ~cb_state->static_status;
3432 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
3433 cb_state->status |= cb_state->static_status;
3434 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003435 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003436 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
3437 SetPipelineState(pipe_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003438 AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003439}
3440
3441void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3442 uint32_t viewportCount, const VkViewport *pViewports) {
3443 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3444 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
3445 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003446 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003447}
3448
3449void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3450 uint32_t exclusiveScissorCount,
3451 const VkRect2D *pExclusiveScissors) {
3452 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3453 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3454 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3455 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003456 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003457}
3458
3459void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3460 VkImageLayout imageLayout) {
3461 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3462
3463 if (imageView != VK_NULL_HANDLE) {
3464 auto view_state = GetImageViewState(imageView);
3465 AddCommandBufferBindingImageView(cb_state, view_state);
3466 }
3467}
3468
3469void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3470 uint32_t viewportCount,
3471 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3472 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3473 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3474 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3475 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003476 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003477}
3478
3479void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3480 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3481 const VkAllocationCallbacks *pAllocator,
3482 VkAccelerationStructureNV *pAccelerationStructure,
3483 VkResult result) {
3484 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003485 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003486
3487 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
3488 VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
3489 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3490 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
3491 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3492 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3493
3494 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
3495 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3496 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
3497 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3498 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3499 &as_state->build_scratch_memory_requirements);
3500
3501 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
3502 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3503 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
3504 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3505 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3506 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003507 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003508 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3509}
3510
// vkCreateAccelerationStructureKHR state tracking: create the state object for the
// new acceleration structure and pre-query all three memory-requirement flavors
// (object, build scratch, update scratch) so bind/validation time doesn't have to.
// Mirrors PostCallRecordCreateAccelerationStructureNV, with the KHR structs and the
// additional buildType field (device builds only are tracked here).
void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
                                                                          const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
                                                                          const VkAllocationCallbacks *pAllocator,
                                                                          VkAccelerationStructureKHR *pAccelerationStructure,
                                                                          VkResult result) {
    // Only track successfully created objects
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoKHR as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR;
    as_memory_requirements_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &as_memory_requirements_info, &as_state->memory_requirements);

    // Scratch requirements for a from-scratch build
    VkAccelerationStructureMemoryRequirementsInfoKHR scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR;
    scratch_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &scratch_memory_req_info,
                                                          &as_state->build_scratch_memory_requirements);

    // Scratch requirements for an update (refit) build
    VkAccelerationStructureMemoryRequirementsInfoKHR update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR;
    update_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &update_memory_req_info,
                                                          &as_state->update_scratch_memory_requirements);
    as_state->allocator = pAllocator;
    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}
3545
locke-lunargd556cc32019-09-17 01:21:23 -06003546void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
3547 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
3548 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
3549 if (as_state != nullptr) {
3550 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3551 as_state->memory_requirements = *pMemoryRequirements;
3552 as_state->memory_requirements_checked = true;
3553 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3554 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3555 as_state->build_scratch_memory_requirements_checked = true;
3556 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3557 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3558 as_state->update_scratch_memory_requirements_checked = true;
3559 }
3560 }
3561}
3562
// Shared record logic for vkBindAccelerationStructureMemoryNV/KHR.
// For each successful bind: records the bound memory range, links the memory object
// to the acceleration structure, and (for NV with GPU-AV enabled) fetches the opaque
// device handle needed to validate top-level builds.
void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryCommon(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result,
    bool isNV) {
    // Nothing was bound on failure
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        const VkBindAccelerationStructureMemoryInfoKHR &info = pBindInfos[i];

        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
        if (as_state) {
            // Track bound memory range information
            auto mem_info = GetDevMemState(info.memory);
            if (mem_info) {
                InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
            }
            // Track objects tied to memory
            SetMemBinding(info.memory, as_state, info.memoryOffset,
                          VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR));

            // GPU validation of top level acceleration structure building needs acceleration structure handles.
            // XXX TODO: Query device address for KHR extension
            if (enabled[gpu_validation] && isNV) {
                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
            }
        }
    }
}
3589
// vkBindAccelerationStructureMemoryNV: forwards to the shared NV/KHR record path
// (isNV = true enables the NV-only GPU-AV handle query).
void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
    PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, true);
}
3594
// vkBindAccelerationStructureMemoryKHR: forwards to the shared NV/KHR record path
// (isNV = false skips the NV-only GPU-AV handle query).
void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryKHR(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result) {
    PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, false);
}
3599
locke-lunargd556cc32019-09-17 01:21:23 -06003600void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3601 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3602 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3603 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3604 if (cb_state == nullptr) {
3605 return;
3606 }
3607
3608 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3609 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3610 if (dst_as_state != nullptr) {
3611 dst_as_state->built = true;
3612 dst_as_state->build_info.initialize(pInfo);
3613 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3614 }
3615 if (src_as_state != nullptr) {
3616 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3617 }
3618 cb_state->hasBuildAccelerationStructureCmd = true;
3619}
3620
3621void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3622 VkAccelerationStructureNV dst,
3623 VkAccelerationStructureNV src,
3624 VkCopyAccelerationStructureModeNV mode) {
3625 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3626 if (cb_state) {
3627 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3628 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3629 if (dst_as_state != nullptr && src_as_state != nullptr) {
3630 dst_as_state->built = true;
3631 dst_as_state->build_info = src_as_state->build_info;
3632 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3633 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3634 }
3635 }
3636}
3637
// vkDestroyAccelerationStructureKHR state tracking: invalidate any command buffers
// that reference the object, detach it from all bound memory, mark the state
// destroyed, and drop it from the tracking map. Also services the NV destroy path.
void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
                                                                          VkAccelerationStructureKHR accelerationStructure,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!accelerationStructure) return;
    auto *as_state = GetAccelerationStructureState(accelerationStructure);
    if (as_state) {
        const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
        // Recorded command buffers that used this object are no longer valid to submit
        InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
        // Remove the tracked memory range from every memory object it was bound to
        for (auto mem_binding : as_state->GetBoundMemory()) {
            RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_binding);
        }
        ClearMemoryObjectBindings(obj_struct);
        // Mark destroyed before erasing; other shared_ptr holders can still observe the flag
        as_state->destroyed = true;
        accelerationStructureMap.erase(accelerationStructure);
    }
}
3654
// vkDestroyAccelerationStructureNV: NV and KHR acceleration structures share the
// same state tracking, so simply forward to the KHR destroy record function.
void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
                                                                         VkAccelerationStructureNV accelerationStructure,
                                                                         const VkAllocationCallbacks *pAllocator) {
    PreCallRecordDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator);
}
3660
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003661void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3662 uint32_t viewportCount,
3663 const VkViewportWScalingNV *pViewportWScalings) {
3664 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3665 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003666 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003667}
3668
locke-lunargd556cc32019-09-17 01:21:23 -06003669void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3670 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3671 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003672 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003673}
3674
3675void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3676 uint16_t lineStipplePattern) {
3677 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3678 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003679 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003680}
3681
3682void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3683 float depthBiasClamp, float depthBiasSlopeFactor) {
3684 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3685 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003686 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003687}
3688
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003689void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3690 const VkRect2D *pScissors) {
3691 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3692 cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
3693 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003694 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003695}
3696
locke-lunargd556cc32019-09-17 01:21:23 -06003697void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3698 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3699 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003700 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003701}
3702
3703void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3704 float maxDepthBounds) {
3705 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3706 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003707 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003708}
3709
3710void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3711 uint32_t compareMask) {
3712 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3713 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003714 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003715}
3716
3717void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3718 uint32_t writeMask) {
3719 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3720 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003721 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003722}
3723
3724void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3725 uint32_t reference) {
3726 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3727 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003728 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003729}
3730
// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
// Sets [first_set, first_set + set_count) are (re)bound; previously bound sets whose layout
// compat id no longer matches are invalidated, per the spec's disturb rules.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    // Exactly one of the two set sources must be provided (bind path XOR push path)
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else.
    // Releases the bound push descriptor set if ds is it; returns true if it did.
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                // Consume this set's slice of the flat dynamic-offset array
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
3824
3825// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3826void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3827 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3828 uint32_t firstSet, uint32_t setCount,
3829 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3830 const uint32_t *pDynamicOffsets) {
3831 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3832 auto pipeline_layout = GetPipelineLayout(layout);
3833
3834 // Resize binding arrays
3835 uint32_t last_set_index = firstSet + setCount - 1;
3836 if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
3837 cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
3838 }
3839
3840 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3841 dynamicOffsetCount, pDynamicOffsets);
3842 cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
3843 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3844}
3845
// Shared record logic for vkCmdPushDescriptorSetKHR (and template variant callers):
// materializes/reuses the command buffer's push descriptor set for `set`, binds it
// through the common descriptor-set binding path, then applies the writes to it.
void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates: set index must exist in the layout and be a push descriptor layout
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set clear it and replace it with a
    // fresh set created against the new layout (ownership is taken by last_bound).
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}
3871
// vkCmdPushDescriptorSetKHR: look up the command buffer state and forward to the
// shared push-descriptor record logic.
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
                                                                  VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                  uint32_t set, uint32_t descriptorWriteCount,
                                                                  const VkWriteDescriptorSet *pDescriptorWrites) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
}
3879
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003880void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3881 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3882 const void *pValues) {
3883 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3884 if (cb_state != nullptr) {
3885 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3886
3887 auto &push_constant_data = cb_state->push_constant_data;
3888 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3889 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
3890 }
3891}
3892
locke-lunargd556cc32019-09-17 01:21:23 -06003893void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3894 VkIndexType indexType) {
3895 auto buffer_state = GetBufferState(buffer);
3896 auto cb_state = GetCBState(commandBuffer);
3897
3898 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
Piers Daniell39842ee2020-07-10 16:42:33 -06003899 cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
locke-lunargd556cc32019-09-17 01:21:23 -06003900 cb_state->index_buffer_binding.buffer = buffer;
3901 cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
3902 cb_state->index_buffer_binding.offset = offset;
3903 cb_state->index_buffer_binding.index_type = indexType;
3904 // Add binding for this index buffer to this commandbuffer
3905 AddCommandBufferBindingBuffer(cb_state, buffer_state);
3906}
3907
3908void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3909 uint32_t bindingCount, const VkBuffer *pBuffers,
3910 const VkDeviceSize *pOffsets) {
3911 auto cb_state = GetCBState(commandBuffer);
3912
3913 uint32_t end = firstBinding + bindingCount;
3914 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3915 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3916 }
3917
3918 for (uint32_t i = 0; i < bindingCount; ++i) {
3919 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
3920 vertex_buffer_binding.buffer = pBuffers[i];
3921 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06003922 vertex_buffer_binding.size = VK_WHOLE_SIZE;
3923 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06003924 // Add binding for this vertex buffer to this commandbuffer
Jeff Bolz165818a2020-05-08 11:19:03 -05003925 if (pBuffers[i]) {
3926 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
3927 }
locke-lunargd556cc32019-09-17 01:21:23 -06003928 }
3929}
3930
3931void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3932 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
3933 auto cb_state = GetCBState(commandBuffer);
3934 auto dst_buffer_state = GetBufferState(dstBuffer);
3935
3936 // Update bindings between buffer and cmd buffer
3937 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
3938}
3939
// Deferred-update helper: records `stageMask` for `event` in the submit-time
// event map. Always returns false ("no validation error") so it can be used
// directly as an eventUpdates callback result.
bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
                                               EventToStageMap *localEventToStageMap) {
    (*localEventToStageMap)[event] = stageMask;
    return false;
}
3945
3946void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3947 VkPipelineStageFlags stageMask) {
3948 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3949 auto event_state = GetEventState(event);
3950 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003951 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003952 }
3953 cb_state->events.push_back(event);
3954 if (!cb_state->waitedEvents.count(event)) {
3955 cb_state->writeEventsBeforeWait.push_back(event);
3956 }
Jeff Bolz310775c2019-10-09 00:46:33 -05003957 cb_state->eventUpdates.emplace_back(
3958 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
3959 return SetEventStageMask(event, stageMask, localEventToStageMap);
3960 });
locke-lunargd556cc32019-09-17 01:21:23 -06003961}
3962
3963void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3964 VkPipelineStageFlags stageMask) {
3965 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3966 auto event_state = GetEventState(event);
3967 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003968 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003969 }
3970 cb_state->events.push_back(event);
3971 if (!cb_state->waitedEvents.count(event)) {
3972 cb_state->writeEventsBeforeWait.push_back(event);
3973 }
3974
3975 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003976 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
3977 return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
3978 });
locke-lunargd556cc32019-09-17 01:21:23 -06003979}
3980
3981void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
3982 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
3983 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3984 uint32_t bufferMemoryBarrierCount,
3985 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3986 uint32_t imageMemoryBarrierCount,
3987 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3988 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3989 for (uint32_t i = 0; i < eventCount; ++i) {
3990 auto event_state = GetEventState(pEvents[i]);
3991 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003992 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
3993 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003994 }
3995 cb_state->waitedEvents.insert(pEvents[i]);
3996 cb_state->events.push_back(pEvents[i]);
3997 }
3998}
3999
// Deferred-update helper: records `value` as the state of a single query in the
// submit-time query map. Always returns false ("no validation error") so it can
// be used directly as a queryUpdates callback result.
bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
    (*localQueryToStateMap)[object] = value;
    return false;
}
4004
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004005bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
4006 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05004007 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004008 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05004009 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06004010 }
4011 return false;
4012}
4013
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004014QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
4015 uint32_t perfPass) const {
4016 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06004017
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004018 auto iter = localQueryToStateMap->find(query);
4019 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05004020
Jeff Bolz310775c2019-10-09 00:46:33 -05004021 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06004022}
4023
4024void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004025 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004026 cb_state->activeQueries.insert(query_obj);
4027 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004028 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
4029 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4030 QueryMap *localQueryToStateMap) {
4031 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
4032 return false;
4033 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004034 auto pool_state = GetQueryPoolState(query_obj.pool);
4035 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
4036 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004037}
4038
4039void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
4040 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004041 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004042 QueryObject query = {queryPool, slot};
4043 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4044 RecordCmdBeginQuery(cb_state, query);
4045}
4046
4047void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004048 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004049 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004050 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
4051 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4052 QueryMap *localQueryToStateMap) {
4053 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
4054 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004055 auto pool_state = GetQueryPoolState(query_obj.pool);
4056 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
4057 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004058}
4059
4060void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004061 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004062 QueryObject query_obj = {queryPool, slot};
4063 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4064 RecordCmdEndQuery(cb_state, query_obj);
4065}
4066
4067void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4068 uint32_t firstQuery, uint32_t queryCount) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004069 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004070 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4071
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02004072 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
4073 QueryObject query = {queryPool, slot};
4074 cb_state->resetQueries.insert(query);
4075 }
4076
Jeff Bolz310775c2019-10-09 00:46:33 -05004077 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004078 bool do_validate, VkQueryPool &firstPerfQueryPool,
4079 uint32_t perfQueryPass,
4080 QueryMap *localQueryToStateMap) {
4081 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06004082 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004083 auto pool_state = GetQueryPoolState(queryPool);
4084 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004085 cb_state);
4086}
4087
4088void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4089 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
4090 VkDeviceSize dstOffset, VkDeviceSize stride,
4091 VkQueryResultFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004092 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004093 auto cb_state = GetCBState(commandBuffer);
4094 auto dst_buff_state = GetBufferState(dstBuffer);
4095 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05004096 auto pool_state = GetQueryPoolState(queryPool);
4097 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004098 cb_state);
4099}
4100
4101void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
4102 VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004103 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004104 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeff Bolzadbfa852019-10-04 13:53:30 -05004105 auto pool_state = GetQueryPoolState(queryPool);
4106 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004107 cb_state);
4108 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004109 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
4110 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4111 QueryMap *localQueryToStateMap) {
4112 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
4113 });
locke-lunargd556cc32019-09-17 01:21:23 -06004114}
4115
// Create and register the shadow FRAMEBUFFER_STATE for a successfully created framebuffer.
// The state object shadows the create info and holds a shared reference to the render pass state.
void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

    // Non-imageless framebuffers enumerate their attachment views here.
    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            VkImageView view = pCreateInfo->pAttachments[i];
            auto view_state = GetImageViewState(view);
            if (!view_state) {
                continue;
            }
            // NOTE(review): this loop currently has no effect — view_state is fetched and then
            // discarded. It looks like leftover scaffolding from removed per-attachment tracking;
            // confirm whether it can be deleted or should record the view on fb_state.
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}
4134
// Build the subpass dependency graph for a render pass:
//  - subpassToNode: prev/next adjacency between subpasses (self and EXTERNAL deps excluded)
//  - self_dependencies: indices of pDependencies whose src == dst
//  - subpass_dependencies: per-subpass barriers from/to EXTERNAL, prev/next barrier maps,
//    and the list of subpasses that may run asynchronously relative to each subpass.
// NOTE(review): the rp_version parameter is unused in this body — confirm whether it is
// reserved for future version-specific handling or can be dropped.
void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);
    auto &subpass_dependencies = render_pass->subpass_dependencies;
    subpass_dependencies.resize(pCreateInfo->subpassCount);

    // Initialize each node/dependency entry with its own subpass index.
    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
        subpass_dependencies[i].pass = i;
    }
    // Classify each declared dependency: self, subpass-to-subpass, or to/from EXTERNAL.
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const auto &dependency = pCreateInfo->pDependencies[i];
        const auto srcSubpass = dependency.srcSubpass;
        const auto dstSubpass = dependency.dstSubpass;
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
        if (srcSubpass == VK_SUBPASS_EXTERNAL) {
            assert(dstSubpass != VK_SUBPASS_EXTERNAL);  // this is invalid per VUID-VkSubpassDependency-srcSubpass-00865
            subpass_dependencies[dstSubpass].barrier_from_external.emplace_back(&dependency);
        } else if (dstSubpass == VK_SUBPASS_EXTERNAL) {
            subpass_dependencies[srcSubpass].barrier_to_external.emplace_back(&dependency);
        } else if (dependency.srcSubpass != dependency.dstSubpass) {
            // ignore self dependencies in prev and next
            subpass_dependencies[srcSubpass].next[&subpass_dependencies[dstSubpass]].emplace_back(&dependency);
            subpass_dependencies[dstSubpass].prev[&subpass_dependencies[srcSubpass]].emplace_back(&dependency);
        }
    }

    //
    // Determine "asynchronous" subpasses
    // Synchronization is only interested in asynchronous subpasses *earlier* than the current one, so we only look towards those.
    // NOTE: This is O(N^3), which we could shrink to O(N^2logN) using sets instead of arrays, but given that N is likely to be
    // small and the K for |= from the prev is much less than for set, we'll accept the brute force.
    std::vector<std::vector<bool>> pass_depends(pCreateInfo->subpassCount);
    for (uint32_t i = 1; i < pCreateInfo->subpassCount; ++i) {
        auto &depends = pass_depends[i];
        depends.resize(i);
        auto &subpass_dep = subpass_dependencies[i];
        // Transitive closure over earlier passes: i depends on everything its predecessors depend on.
        for (const auto &prev : subpass_dep.prev) {
            const auto prev_pass = prev.first->pass;
            const auto &prev_depends = pass_depends[prev_pass];
            for (uint32_t j = 0; j < prev_pass; j++) {
                depends[j] = depends[j] | prev_depends[j];
            }
            depends[prev_pass] = true;
        }
        // Any earlier pass not in the closure can run concurrently with this one.
        for (uint32_t pass = 0; pass < subpass_dep.pass; pass++) {
            if (!depends[pass]) {
                subpass_dep.async.push_back(pass);
            }
        }
    }
}
4198
John Zulauf4aff5d92020-02-21 08:29:35 -07004199static VkSubpassDependency2 ImplicitDependencyFromExternal(uint32_t subpass) {
4200 VkSubpassDependency2 from_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4201 nullptr,
4202 VK_SUBPASS_EXTERNAL,
4203 subpass,
4204 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
4205 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4206 0,
4207 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4208 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4209 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4210 0,
4211 0};
4212 return from_external;
4213}
4214
4215static VkSubpassDependency2 ImplicitDependencyToExternal(uint32_t subpass) {
4216 VkSubpassDependency2 to_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4217 nullptr,
4218 subpass,
4219 VK_SUBPASS_EXTERNAL,
4220 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4221 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
4222 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4223 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4224 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4225 0,
4226 0,
4227 0};
4228 return to_external;
4229}
4230
locke-lunargd556cc32019-09-17 01:21:23 -06004231void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
4232 std::shared_ptr<RENDER_PASS_STATE> &render_pass,
4233 VkRenderPass *pRenderPass) {
4234 render_pass->renderPass = *pRenderPass;
4235 auto create_info = render_pass->createInfo.ptr();
4236
4237 RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());
4238
John Zulauf8863c332020-03-20 10:34:33 -06004239 struct AttachmentTracker { // This is really only of local interest, but a bit big for a lambda
4240 RENDER_PASS_STATE *const rp;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004241 std::vector<uint32_t> &first;
John Zulauf1507ee42020-05-18 11:33:09 -06004242 std::vector<bool> &first_is_transition;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004243 std::vector<uint32_t> &last;
John Zulauf8863c332020-03-20 10:34:33 -06004244 std::vector<std::vector<RENDER_PASS_STATE::AttachmentTransition>> &subpass_transitions;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004245 std::unordered_map<uint32_t, bool> &first_read;
4246 const uint32_t attachment_count;
John Zulauf8863c332020-03-20 10:34:33 -06004247 std::vector<VkImageLayout> attachment_layout;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004248 std::vector<std::vector<VkImageLayout>> subpass_attachment_layout;
John Zulauf8863c332020-03-20 10:34:33 -06004249 AttachmentTracker(std::shared_ptr<RENDER_PASS_STATE> &render_pass)
4250 : rp(render_pass.get()),
4251 first(rp->attachment_first_subpass),
John Zulauf1507ee42020-05-18 11:33:09 -06004252 first_is_transition(rp->attachment_first_is_transition),
John Zulauf8863c332020-03-20 10:34:33 -06004253 last(rp->attachment_last_subpass),
4254 subpass_transitions(rp->subpass_transitions),
4255 first_read(rp->attachment_first_read),
4256 attachment_count(rp->createInfo.attachmentCount),
John Zulauf2bc1fde2020-04-24 15:09:51 -06004257 attachment_layout(),
4258 subpass_attachment_layout() {
John Zulaufbb9f07f2020-03-19 16:53:06 -06004259 first.resize(attachment_count, VK_SUBPASS_EXTERNAL);
John Zulauf1507ee42020-05-18 11:33:09 -06004260 first_is_transition.resize(attachment_count, false);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004261 last.resize(attachment_count, VK_SUBPASS_EXTERNAL);
John Zulauf8863c332020-03-20 10:34:33 -06004262 subpass_transitions.resize(rp->createInfo.subpassCount + 1); // Add an extra for EndRenderPass
4263 attachment_layout.reserve(attachment_count);
John Zulauf2bc1fde2020-04-24 15:09:51 -06004264 subpass_attachment_layout.resize(rp->createInfo.subpassCount);
4265 for (auto &subpass_layouts : subpass_attachment_layout) {
4266 subpass_layouts.resize(attachment_count, kInvalidLayout);
4267 }
4268
John Zulauf8863c332020-03-20 10:34:33 -06004269 for (uint32_t j = 0; j < attachment_count; j++) {
4270 attachment_layout.push_back(rp->createInfo.pAttachments[j].initialLayout);
4271 }
John Zulauf4aff5d92020-02-21 08:29:35 -07004272 }
John Zulauf4aff5d92020-02-21 08:29:35 -07004273
John Zulaufbb9f07f2020-03-19 16:53:06 -06004274 void Update(uint32_t subpass, const VkAttachmentReference2 *attach_ref, uint32_t count, bool is_read) {
4275 if (nullptr == attach_ref) return;
4276 for (uint32_t j = 0; j < count; ++j) {
4277 const auto attachment = attach_ref[j].attachment;
4278 if (attachment != VK_ATTACHMENT_UNUSED) {
John Zulauf8863c332020-03-20 10:34:33 -06004279 const auto layout = attach_ref[j].layout;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004280 // Take advantage of the fact that insert won't overwrite, so we'll only write the first time.
4281 first_read.insert(std::make_pair(attachment, is_read));
John Zulauf2bc1fde2020-04-24 15:09:51 -06004282 if (first[attachment] == VK_SUBPASS_EXTERNAL) {
4283 first[attachment] = subpass;
4284 const auto initial_layout = rp->createInfo.pAttachments[attachment].initialLayout;
John Zulauf1507ee42020-05-18 11:33:09 -06004285 if (initial_layout != layout) {
4286 subpass_transitions[subpass].emplace_back(VK_SUBPASS_EXTERNAL, attachment, initial_layout, layout);
4287 first_is_transition[attachment] = true;
4288 }
John Zulauf2bc1fde2020-04-24 15:09:51 -06004289 }
John Zulaufbb9f07f2020-03-19 16:53:06 -06004290 last[attachment] = subpass;
John Zulauf8863c332020-03-20 10:34:33 -06004291
John Zulauf2bc1fde2020-04-24 15:09:51 -06004292 for (const auto &prev : rp->subpass_dependencies[subpass].prev) {
John Zulaufbaea94f2020-09-15 17:55:16 -06004293 const auto prev_pass = prev.first->pass;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004294 const auto prev_layout = subpass_attachment_layout[prev_pass][attachment];
4295 if ((prev_layout != kInvalidLayout) && (prev_layout != layout)) {
4296 subpass_transitions[subpass].emplace_back(prev_pass, attachment, prev_layout, layout);
4297 }
John Zulauf8863c332020-03-20 10:34:33 -06004298 }
John Zulauf2bc1fde2020-04-24 15:09:51 -06004299 attachment_layout[attachment] = layout;
John Zulauf8863c332020-03-20 10:34:33 -06004300 }
4301 }
4302 }
4303 void FinalTransitions() {
4304 auto &final_transitions = subpass_transitions[rp->createInfo.subpassCount];
4305
4306 for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
4307 const auto final_layout = rp->createInfo.pAttachments[attachment].finalLayout;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004308 // Add final transitions for attachments that were used and change layout.
4309 if ((last[attachment] != VK_SUBPASS_EXTERNAL) && final_layout != attachment_layout[attachment]) {
4310 final_transitions.emplace_back(last[attachment], attachment, attachment_layout[attachment], final_layout);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004311 }
locke-lunargd556cc32019-09-17 01:21:23 -06004312 }
4313 }
John Zulaufbb9f07f2020-03-19 16:53:06 -06004314 };
John Zulauf8863c332020-03-20 10:34:33 -06004315 AttachmentTracker attachment_tracker(render_pass);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004316
4317 for (uint32_t subpass_index = 0; subpass_index < create_info->subpassCount; ++subpass_index) {
4318 const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[subpass_index];
John Zulauf8863c332020-03-20 10:34:33 -06004319 attachment_tracker.Update(subpass_index, subpass.pColorAttachments, subpass.colorAttachmentCount, false);
4320 attachment_tracker.Update(subpass_index, subpass.pResolveAttachments, subpass.colorAttachmentCount, false);
4321 attachment_tracker.Update(subpass_index, subpass.pDepthStencilAttachment, 1, false);
4322 attachment_tracker.Update(subpass_index, subpass.pInputAttachments, subpass.inputAttachmentCount, true);
John Zulauf4aff5d92020-02-21 08:29:35 -07004323 }
John Zulauf8863c332020-03-20 10:34:33 -06004324 attachment_tracker.FinalTransitions();
John Zulauf4aff5d92020-02-21 08:29:35 -07004325
John Zulaufbb9f07f2020-03-19 16:53:06 -06004326 // Add implicit dependencies
John Zulauf8863c332020-03-20 10:34:33 -06004327 for (uint32_t attachment = 0; attachment < attachment_tracker.attachment_count; attachment++) {
4328 const auto first_use = attachment_tracker.first[attachment];
John Zulauf4aff5d92020-02-21 08:29:35 -07004329 if (first_use != VK_SUBPASS_EXTERNAL) {
4330 auto &subpass_dep = render_pass->subpass_dependencies[first_use];
John Zulaufbaea94f2020-09-15 17:55:16 -06004331 if (subpass_dep.barrier_from_external.size() == 0) {
4332 // Add implicit from barrier if they're aren't any
John Zulauf4aff5d92020-02-21 08:29:35 -07004333 subpass_dep.implicit_barrier_from_external.reset(
4334 new VkSubpassDependency2(ImplicitDependencyFromExternal(first_use)));
John Zulaufbaea94f2020-09-15 17:55:16 -06004335 subpass_dep.barrier_from_external.emplace_back(subpass_dep.implicit_barrier_from_external.get());
John Zulauf4aff5d92020-02-21 08:29:35 -07004336 }
4337 }
4338
John Zulauf8863c332020-03-20 10:34:33 -06004339 const auto last_use = attachment_tracker.last[attachment];
John Zulauf4aff5d92020-02-21 08:29:35 -07004340 if (last_use != VK_SUBPASS_EXTERNAL) {
4341 auto &subpass_dep = render_pass->subpass_dependencies[last_use];
John Zulaufbaea94f2020-09-15 17:55:16 -06004342 if (render_pass->subpass_dependencies[last_use].barrier_to_external.size() == 0) {
4343 // Add implicit to barrier if they're aren't any
John Zulauf4aff5d92020-02-21 08:29:35 -07004344 subpass_dep.implicit_barrier_to_external.reset(new VkSubpassDependency2(ImplicitDependencyToExternal(last_use)));
John Zulaufbaea94f2020-09-15 17:55:16 -06004345 subpass_dep.barrier_to_external.emplace_back(subpass_dep.implicit_barrier_to_external.get());
John Zulauf4aff5d92020-02-21 08:29:35 -07004346 }
locke-lunargd556cc32019-09-17 01:21:23 -06004347 }
4348 }
4349
4350 // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
4351 renderPassMap[*pRenderPass] = std::move(render_pass);
4352}
4353
// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in google style guide, but intentionally forces caller to move
// or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
// construction or assignment.
4358void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
4359 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4360 VkResult result) {
4361 if (VK_SUCCESS != result) return;
4362 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4363 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
4364}
4365
Tony-LunarG977448c2019-12-02 14:52:02 -07004366void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
4367 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4368 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004369 if (VK_SUCCESS != result) return;
4370 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4371 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
4372}
4373
Tony-LunarG977448c2019-12-02 14:52:02 -07004374void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
4375 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4376 VkResult result) {
4377 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
4378}
4379
4380void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
4381 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4382 VkResult result) {
4383 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
4384}
4385
locke-lunargd556cc32019-09-17 01:21:23 -06004386void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
4387 const VkRenderPassBeginInfo *pRenderPassBegin,
4388 const VkSubpassContents contents) {
4389 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunargaecf2152020-05-12 17:15:41 -06004390 auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
4391 auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;
locke-lunargd556cc32019-09-17 01:21:23 -06004392
4393 if (render_pass_state) {
locke-lunargaecf2152020-05-12 17:15:41 -06004394 cb_state->activeFramebuffer = framebuffer;
locke-lunargd556cc32019-09-17 01:21:23 -06004395 cb_state->activeRenderPass = render_pass_state;
Tony-LunarG61e7c0c2020-03-03 16:09:11 -07004396 cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
locke-lunargd556cc32019-09-17 01:21:23 -06004397 cb_state->activeSubpass = 0;
4398 cb_state->activeSubpassContents = contents;
locke-lunargaecf2152020-05-12 17:15:41 -06004399 if (framebuffer) cb_state->framebuffers.insert(framebuffer);
locke-lunargd556cc32019-09-17 01:21:23 -06004400 // Connect this framebuffer and its children to this cmdBuffer
locke-lunargaecf2152020-05-12 17:15:41 -06004401 AddFramebufferBinding(cb_state, framebuffer.get());
locke-lunargd556cc32019-09-17 01:21:23 -06004402 // Connect this RP to cmdBuffer
locke-lunargaecf2152020-05-12 17:15:41 -06004403 AddCommandBufferBinding(
4404 render_pass_state->cb_bindings,
4405 VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state.get()), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004406
4407 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
4408 if (chained_device_group_struct) {
4409 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
4410 } else {
4411 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
4412 }
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03004413
4414 cb_state->imagelessFramebufferAttachments.clear();
4415 auto attachment_info_struct = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
4416 if (attachment_info_struct) {
4417 for (uint32_t i = 0; i < attachment_info_struct->attachmentCount; i++) {
4418 IMAGE_VIEW_STATE *img_view_state = GetImageViewState(attachment_info_struct->pAttachments[i]);
4419 cb_state->imagelessFramebufferAttachments.push_back(img_view_state);
4420 }
4421 }
locke-lunargd556cc32019-09-17 01:21:23 -06004422 }
4423}
4424
4425void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
4426 const VkRenderPassBeginInfo *pRenderPassBegin,
4427 VkSubpassContents contents) {
4428 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
4429}
4430
4431void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
4432 const VkRenderPassBeginInfo *pRenderPassBegin,
4433 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
4434 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4435}
4436
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06004437void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4438 uint32_t counterBufferCount,
4439 const VkBuffer *pCounterBuffers,
4440 const VkDeviceSize *pCounterBufferOffsets) {
4441 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4442
4443 cb_state->transform_feedback_active = true;
4444}
4445
4446void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4447 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
4448 const VkDeviceSize *pCounterBufferOffsets) {
4449 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4450
4451 cb_state->transform_feedback_active = false;
4452}
4453
Tony-LunarG977448c2019-12-02 14:52:02 -07004454void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
4455 const VkRenderPassBeginInfo *pRenderPassBegin,
4456 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
4457 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4458}
4459
locke-lunargd556cc32019-09-17 01:21:23 -06004460void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4461 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4462 cb_state->activeSubpass++;
4463 cb_state->activeSubpassContents = contents;
4464}
4465
4466void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4467 RecordCmdNextSubpass(commandBuffer, contents);
4468}
4469
4470void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
4471 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
4472 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
4473 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4474}
4475
Tony-LunarG977448c2019-12-02 14:52:02 -07004476void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
4477 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
4478 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
4479 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4480}
4481
locke-lunargd556cc32019-09-17 01:21:23 -06004482void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
4483 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4484 cb_state->activeRenderPass = nullptr;
4485 cb_state->activeSubpass = 0;
4486 cb_state->activeFramebuffer = VK_NULL_HANDLE;
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03004487 cb_state->imagelessFramebufferAttachments.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06004488}
4489
4490void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
4491 RecordCmdEndRenderPassState(commandBuffer);
4492}
4493
4494void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
4495 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
4496 RecordCmdEndRenderPassState(commandBuffer);
4497}
4498
Tony-LunarG977448c2019-12-02 14:52:02 -07004499void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
4500 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
4501 RecordCmdEndRenderPassState(commandBuffer);
4502}
// Record execution of secondary command buffers in a primary: clears the primary's
// SIMULTANEOUS_USE flag when a non-simultaneous secondary is executed, propagates
// image layout state from each secondary into the primary, and links the command
// buffers plus their deferred query/submit-time callbacks.
void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        // A non-simultaneous secondary forces the primary out of simultaneous use.
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
                // from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        // Cross-link primary and secondary, and hoist the secondary's deferred callbacks
        // so they run when the primary is submitted.
        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}
4545
// vkMapMemory: on success, record the mapped range and host pointer on the memory object.
void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}
4551
4552void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4553 auto mem_info = GetDevMemState(mem);
4554 if (mem_info) {
4555 mem_info->mapped_range = MemRange();
4556 mem_info->p_driver_data = nullptr;
4557 }
4558}
4559
// Common state update for vkBindImageMemory and vkBindImageMemory2*: records the
// image's memory (or swapchain) binding and rebuilds its subresource encoder.
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // An Android special image cannot get VkSubresourceLayout until the image binds a memory.
        // See: VUID-vkGetImageSubresourceLayout-image-01895
        image_state->fragment_encoder =
            std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            // Image is backed by a swapchain image rather than a VkDeviceMemory allocation.
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        // Alias-capable and swapchain-bound images may share memory with other images.
        if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
            AddAliasingImage(image_state);
        }
    }
}
4591
4592void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
4593 VkDeviceSize memoryOffset, VkResult result) {
4594 if (VK_SUCCESS != result) return;
4595 VkBindImageMemoryInfo bindInfo = {};
4596 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
4597 bindInfo.image = image;
4598 bindInfo.memory = mem;
4599 bindInfo.memoryOffset = memoryOffset;
4600 UpdateBindImageMemoryState(bindInfo);
4601}
4602
4603void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
4604 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
4605 if (VK_SUCCESS != result) return;
4606 for (uint32_t i = 0; i < bindInfoCount; i++) {
4607 UpdateBindImageMemoryState(pBindInfos[i]);
4608 }
4609}
4610
4611void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
4612 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
4613 if (VK_SUCCESS != result) return;
4614 for (uint32_t i = 0; i < bindInfoCount; i++) {
4615 UpdateBindImageMemoryState(pBindInfos[i]);
4616 }
4617}
4618
4619void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
4620 auto event_state = GetEventState(event);
4621 if (event_state) {
4622 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
4623 }
locke-lunargd556cc32019-09-17 01:21:23 -06004624}
4625
// vkImportSemaphoreFdKHR: on success, update the semaphore's sync scope per the import.
void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}
4633
4634void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
4635 VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
4636 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
4637 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
4638 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
4639 semaphore_state->scope = kSyncScopeExternalPermanent;
4640 }
4641}
4642
#ifdef VK_USE_PLATFORM_WIN32_KHR
// vkImportSemaphoreWin32HandleKHR: on success, update the semaphore's sync scope.
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

// vkGetSemaphoreWin32HandleKHR: on success, record the export's effect on sync scope.
void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

// vkImportFenceWin32HandleKHR: on success, update the fence's sync scope.
void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

// vkGetFenceWin32HandleKHR: on success, record the export's effect on fence state.
void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif
4672
// vkGetSemaphoreFdKHR: on success, record the export's effect on the semaphore's sync scope.
void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}
4678
4679void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
4680 VkFenceImportFlagsKHR flags) {
4681 FENCE_STATE *fence_node = GetFenceState(fence);
4682 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
4683 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
4684 fence_node->scope == kSyncScopeInternal) {
4685 fence_node->scope = kSyncScopeExternalTemporary;
4686 } else {
4687 fence_node->scope = kSyncScopeExternalPermanent;
4688 }
4689 }
4690}
4691
// vkImportFenceFdKHR: on success, update the fence's sync scope per the import.
void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}
4697
4698void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
4699 FENCE_STATE *fence_state = GetFenceState(fence);
4700 if (fence_state) {
4701 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
4702 // Export with reference transference becomes external
4703 fence_state->scope = kSyncScopeExternalPermanent;
4704 } else if (fence_state->scope == kSyncScopeInternal) {
4705 // Export with copy transference has a side effect of resetting the fence
4706 fence_state->state = FENCE_UNSIGNALED;
4707 }
4708 }
4709}
4710
// vkGetFenceFdKHR: on success, record the export's effect on the fence's state.
void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}
4716
4717void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
4718 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
4719 if (VK_SUCCESS != result) return;
4720 eventMap[*pEvent].write_in_use = 0;
4721 eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
4722}
4723
// Common state update for vkCreateSwapchainKHR / vkCreateSharedSwapchainsKHR:
// on success, create and register the swapchain node; regardless of outcome,
// retire the old swapchain as the spec requires.
void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        // Shared-presentable modes change image acquire/layout semantics downstream.
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
    return;
}
4744
// vkCreateSwapchainKHR: resolve surface/old-swapchain state and delegate to the common recorder.
void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}
4752
// vkDestroySwapchainKHR: tear down state for every swapchain image, unlink the
// owning surface, and drop the swapchain node itself.
void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            // Swapchain images are owned by the swapchain, so their bindings and
            // IMAGE_STATE entries die with it.
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            // Only clear the surface's back-pointer if it still points at us.
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}
4772
// vkQueuePresentKHR: retire the wait semaphores and mark each successfully
// presented image as released back to the presentation engine (WSI).
void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
        // confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                // Shared-presentable images stay in their presentable layout forever after first present.
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
    // its semaphore waits) /never/ participate in any completion proof.
}
4804
4805void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
4806 const VkSwapchainCreateInfoKHR *pCreateInfos,
4807 const VkAllocationCallbacks *pAllocator,
4808 VkSwapchainKHR *pSwapchains, VkResult result) {
4809 if (pCreateInfos) {
4810 for (uint32_t i = 0; i < swapchainCount; i++) {
4811 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
4812 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
4813 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
4814 }
4815 }
4816}
4817
// Common state update for vkAcquireNextImageKHR / vkAcquireNextImage2KHR:
// the fence/semaphore become signal-pending and the acquired image is flagged.
void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}
4847
// vkAcquireNextImageKHR: record acquire state on success or suboptimal (an image was still acquired).
void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}
4854
// vkAcquireNextImage2KHR: same as AcquireNextImageKHR with parameters unpacked from the info struct.
void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}
4861
4862void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
4863 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
4864 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
4865 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
4866 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
4867 phys_device_state.phys_device = pPhysicalDevices[i];
4868 // Init actual features for each physical device
4869 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
4870 }
4871 }
4872}
4873
// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
// Grows (never shrinks) the cached known-count and, when properties were
// returned (second-phase call), caches them per family index.
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (pQueueFamilyProperties) {  // Save queue family properties
        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}
4886
// vkGetPhysicalDeviceQueueFamilyProperties: wrap the legacy (non-2) structs in
// VkQueueFamilyProperties2KHR so the common recorder handles both entry points.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}
4905
// vkGetPhysicalDeviceQueueFamilyProperties2: forwards directly to the common recorder.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}
4913
// vkGetPhysicalDeviceQueueFamilyProperties2KHR: identical to the core 2 variant.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}
4921void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
4922 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004923 if (!surface) return;
4924 auto surface_state = GetSurfaceState(surface);
4925 surface_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004926 surface_map.erase(surface);
4927}
4928
4929void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004930 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06004931}
4932
// vkCreateDisplayPlaneSurfaceKHR: on success, register the new surface.
void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
4940
// Per-platform surface-creation hooks. Each is identical: on success, register
// tracking state for the new VkSurfaceKHR via RecordVulkanSurface.
#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_METAL_EXT
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_METAL_EXT

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR
5017
// vkCreateHeadlessSurfaceEXT: on success, register the new surface (no platform #ifdef needed).
void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
                                                                    const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
                                                                    const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                    VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
5025
// vkGetPhysicalDeviceFeatures: cache the core feature set on the device state.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    // Reset the features2 safe struct before setting up the features field.
    physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
    physical_device_state->features2.features = *pFeatures;
}
5033
// vkGetPhysicalDeviceFeatures2: cache the full features2 chain (deep copy via the safe struct).
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->features2.initialize(pFeatures);
}
5039
// vkGetPhysicalDeviceFeatures2KHR: identical to the core 2 variant.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->features2.initialize(pFeatures);
}
5045
// vkGetPhysicalDeviceSurfaceCapabilitiesKHR: on success, cache the surface capabilities.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;

    // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
}
5057
// vkGetPhysicalDeviceSurfaceCapabilities2KHR: on success, cache the embedded core capabilities.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;

    // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
}
5068
5069void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
5070 VkSurfaceKHR surface,
5071 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
5072 VkResult result) {
5073 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005074 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
5075 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
5076 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
5077 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
5078 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
5079 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
5080 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
5081 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
5082 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
5083 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005084
5085 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5086 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005087}
5088
// vkGetPhysicalDeviceSurfaceSupportKHR: on success, cache per-(device, queue family)
// presentation support on the surface state.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}
5096
// vkGetPhysicalDeviceSurfacePresentModesKHR: cache the supported present modes.
// Handles both phases of the count/fill query protocol.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);

    if (*pPresentModeCount) {
        // Grow (never shrink) the cached list to the reported count.
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        // Second-phase (fill) call: cache the returned modes.
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}
5117
// vkGetPhysicalDeviceSurfaceFormatsKHR: cache the supported surface formats.
// Handles both phases of the count/fill query protocol.
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);

    if (*pSurfaceFormatCount) {
        // Grow (never shrink) the cached list to the reported count.
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        // Second-phase (fill) call: cache the returned formats.
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}
5136
5137void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
5138 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5139 uint32_t *pSurfaceFormatCount,
5140 VkSurfaceFormat2KHR *pSurfaceFormats,
5141 VkResult result) {
5142 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5143
5144 auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
5145 if (*pSurfaceFormatCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005146 if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
5147 physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
5148 }
5149 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06005150 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
5151 physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
5152 }
5153 }
5154}
5155
// Forward vkCmdBeginDebugUtilsLabelEXT to the shared debug-utils label tracker
// so logging can report the active label region for this command buffer.
void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}
5160
// Close the current debug-utils label region for this command buffer.
void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}
5164
5165void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
5166 const VkDebugUtilsLabelEXT *pLabelInfo) {
5167 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
5168
5169 // Squirrel away an easily accessible copy.
5170 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5171 cb_state->debug_label = LoggingLabel(pLabelInfo);
5172}
5173
5174void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
5175 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
5176 if (NULL != pPhysicalDeviceGroupProperties) {
5177 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
5178 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
5179 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
5180 auto &phys_device_state = physical_device_map[cur_phys_dev];
5181 phys_device_state.phys_device = cur_phys_dev;
5182 // Init actual features for each physical device
5183 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
5184 }
5185 }
5186 }
5187}
5188
5189void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
5190 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
5191 VkResult result) {
5192 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5193 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5194}
5195
5196void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
5197 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
5198 VkResult result) {
5199 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5200 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5201}
5202
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005203void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
5204 uint32_t queueFamilyIndex,
5205 uint32_t *pCounterCount,
5206 VkPerformanceCounterKHR *pCounters) {
5207 if (NULL == pCounters) return;
5208
5209 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5210 assert(physical_device_state);
5211
5212 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
5213 queueFamilyCounters->counters.resize(*pCounterCount);
5214 for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];
5215
5216 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
5217}
5218
5219void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
5220 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
5221 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
5222 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5223 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
5224}
5225
5226void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
5227 VkResult result) {
5228 if (result == VK_SUCCESS) performance_lock_acquired = true;
5229}
5230
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005231void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
5232 performance_lock_acquired = false;
5233 for (auto &cmd_buffer : commandBufferMap) {
5234 cmd_buffer.second->performance_lock_released = true;
5235 }
5236}
5237
locke-lunargd556cc32019-09-17 01:21:23 -06005238void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
5239 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5240 const VkAllocationCallbacks *pAllocator) {
5241 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005242 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5243 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005244 desc_template_map.erase(descriptorUpdateTemplate);
5245}
5246
5247void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
5248 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5249 const VkAllocationCallbacks *pAllocator) {
5250 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005251 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5252 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005253 desc_template_map.erase(descriptorUpdateTemplate);
5254}
5255
5256void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
5257 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
5258 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005259 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06005260 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
5261}
5262
5263void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
5264 VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
5265 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
5266 if (VK_SUCCESS != result) return;
5267 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5268}
5269
5270void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
5271 VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
5272 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
5273 if (VK_SUCCESS != result) return;
5274 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5275}
5276
5277void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
5278 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5279 const void *pData) {
5280 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
5281 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
5282 assert(0);
5283 } else {
5284 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
5285 // TODO: Record template push descriptor updates
5286 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
5287 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
5288 }
5289 }
5290}
5291
// Core-API entry point: forward to the shared template-update recorder.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5297
// KHR entry point: forward to the shared template-update recorder.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5303
// Record state for vkCmdPushDescriptorSetWithTemplateKHR: decode the raw
// template data blob into equivalent VkWriteDescriptorSet updates and feed
// them through the regular push-descriptor recording path.
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        // Descriptor-set layout for the target set index within the pipeline layout.
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        // Skip recording if the set layout is unknown or already destroyed.
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}
5324
5325void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
5326 uint32_t *pPropertyCount, void *pProperties) {
5327 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5328 if (*pPropertyCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005329 physical_device_state->display_plane_property_count = *pPropertyCount;
5330 }
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005331 if (*pPropertyCount || pProperties) {
5332 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005333 }
5334}
5335
5336void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
5337 uint32_t *pPropertyCount,
5338 VkDisplayPlanePropertiesKHR *pProperties,
5339 VkResult result) {
5340 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5341 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5342}
5343
5344void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
5345 uint32_t *pPropertyCount,
5346 VkDisplayPlaneProperties2KHR *pProperties,
5347 VkResult result) {
5348 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5349 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5350}
5351
5352void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5353 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
5354 QueryObject query_obj = {queryPool, query, index};
5355 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5356 RecordCmdBeginQuery(cb_state, query_obj);
5357}
5358
5359void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5360 uint32_t query, uint32_t index) {
5361 QueryObject query_obj = {queryPool, query, index};
5362 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5363 RecordCmdEndQuery(cb_state, query_obj);
5364}
5365
5366void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
5367 VkSamplerYcbcrConversion ycbcr_conversion) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005368 auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();
5369
locke-lunargd556cc32019-09-17 01:21:23 -06005370 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005371 RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005372 }
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005373
5374 const VkFormat conversion_format = create_info->format;
5375
5376 if (conversion_format != VK_FORMAT_UNDEFINED) {
5377 // If format is VK_FORMAT_UNDEFINED, will be set by external AHB features
5378 ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
5379 }
5380
5381 ycbcr_state->chromaFilter = create_info->chromaFilter;
5382 ycbcr_state->format = conversion_format;
5383 samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005384}
5385
5386void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
5387 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5388 const VkAllocationCallbacks *pAllocator,
5389 VkSamplerYcbcrConversion *pYcbcrConversion,
5390 VkResult result) {
5391 if (VK_SUCCESS != result) return;
5392 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5393}
5394
5395void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
5396 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5397 const VkAllocationCallbacks *pAllocator,
5398 VkSamplerYcbcrConversion *pYcbcrConversion,
5399 VkResult result) {
5400 if (VK_SUCCESS != result) return;
5401 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5402}
5403
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005404void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
5405 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
5406 RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
5407 }
5408
5409 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
5410 ycbcr_state->destroyed = true;
5411 samplerYcbcrConversionMap.erase(ycbcr_conversion);
5412}
5413
locke-lunargd556cc32019-09-17 01:21:23 -06005414void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
5415 const VkAllocationCallbacks *pAllocator) {
5416 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005417 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005418}
5419
5420void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
5421 VkSamplerYcbcrConversion ycbcrConversion,
5422 const VkAllocationCallbacks *pAllocator) {
5423 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005424 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005425}
5426
// Reset tracked query states for the host-query-reset path
// (vkResetQueryPool / vkResetQueryPoolEXT).
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    // Clamp so an over-long range cannot index past the end of the pool.
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        // NOTE(review): query_obj.perf_pass carries over from the previous
        // iteration's inner loop, so for perf pools this write duplicates the
        // last pass's entry rather than targeting pass 0. The inner loop below
        // covers every pass anyway, so this looks harmless -- confirm.
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            // Performance queries keep one state entry per counter pass.
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_obj.perf_pass = passIndex;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}
5450
// EXT alias of vkResetQueryPool -- same host-query-reset recording.
void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5455
// Core vkResetQueryPool -- forward to the shared host-query-reset recorder.
void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5460
locke-lunargd556cc32019-09-17 01:21:23 -06005461void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
5462 const TEMPLATE_STATE *template_state, const void *pData) {
5463 // Translate the templated update into a normal update for validation...
5464 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
5465 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
5466 decoded_update.desc_writes.data(), 0, NULL);
5467}
5468
5469// Update the common AllocateDescriptorSetsData
5470void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005471 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005472 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005473 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005474 if (layout) {
5475 ds_data->layout_nodes[i] = layout;
5476 // Count total descriptors required per type
5477 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5478 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
5479 uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
5480 ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
5481 }
5482 }
5483 // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
5484 }
5485}
5486
5487// Decrement allocated sets from the pool and insert new sets into set_map
5488void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
5489 const VkDescriptorSet *descriptor_sets,
5490 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
5491 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
5492 // Account for sets and individual descriptors allocated from pool
5493 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
5494 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
5495 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
5496 }
5497
5498 const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
5499 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
5500
5501 // Create tracking object for each descriptor set; insert into global map and the pool's set.
5502 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
5503 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
5504
Jeff Bolz41a1ced2019-10-11 11:40:49 -05005505 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07005506 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06005507 pool_state->sets.insert(new_ds.get());
5508 new_ds->in_use.store(0);
5509 setMap[descriptor_sets[i]] = std::move(new_ds);
5510 }
5511}
5512
// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
                                                            VkPipelineBindPoint bind_point, const char *function) {
    UpdateDrawState(cb_state, cmd_type, bind_point, function);
    // NOTE(review): hasDispatchCmd is set here even when reached on behalf of a
    // draw command (via UpdateStateCmdDrawType) -- presumably intentional; confirm.
    cb_state->hasDispatchCmd = true;
}
5519
// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
                                                    const char *function) {
    // Draws share the dispatch-type bookkeeping, then additionally flag a draw.
    UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
    cb_state->hasDrawCmd = true;
}
5526
5527void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5528 uint32_t firstVertex, uint32_t firstInstance) {
5529 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005530 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
locke-lunargd556cc32019-09-17 01:21:23 -06005531}
5532
5533void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5534 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5535 uint32_t firstInstance) {
5536 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005537 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
locke-lunargd556cc32019-09-17 01:21:23 -06005538}
5539
5540void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5541 uint32_t count, uint32_t stride) {
5542 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5543 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005544 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005545 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5546}
5547
5548void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5549 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5550 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5551 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005552 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005553 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5554}
5555
5556void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5557 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005558 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
locke-lunargd556cc32019-09-17 01:21:23 -06005559}
5560
5561void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5562 VkDeviceSize offset) {
5563 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005564 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005565 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5566 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5567}
5568
Tony-LunarG977448c2019-12-02 14:52:02 -07005569void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5570 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
locke-lunarg540b2252020-08-03 13:23:36 -06005571 uint32_t stride, const char *function) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005572 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5573 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5574 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005575 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Tony-LunarG977448c2019-12-02 14:52:02 -07005576 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5577 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5578}
5579
// KHR alias of vkCmdDrawIndirectCount -- forwards to the shared recorder with
// the API name used in resulting messages.
void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                               "vkCmdDrawIndirectCountKHR()");
}
5587
// Core vkCmdDrawIndirectCount -- forwards to the shared recorder with the API
// name used in resulting messages.
void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                               "vkCmdDrawIndirectCount()");
}
5594
5595void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5596 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
locke-lunarg540b2252020-08-03 13:23:36 -06005597 uint32_t maxDrawCount, uint32_t stride, const char *function) {
locke-lunargd556cc32019-09-17 01:21:23 -06005598 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5599 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5600 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005601 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005602 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5603 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5604}
5605
// KHR alias of vkCmdDrawIndexedIndirectCount -- forwards to the shared
// recorder with the API name used in resulting messages.
void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                                      "vkCmdDrawIndexedIndirectCountKHR()");
}
5613
// Core vkCmdDrawIndexedIndirectCount -- forwards to the shared recorder with
// the API name used in resulting messages.
void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                      VkDeviceSize offset, VkBuffer countBuffer,
                                                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                      uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
                                      "vkCmdDrawIndexedIndirectCount()");
}
5621
5622void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5623 uint32_t firstTask) {
5624 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005625 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005626}
5627
5628void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5629 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5630 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005631 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5632 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005633 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5634 if (buffer_state) {
5635 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5636 }
5637}
5638
5639void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5640 VkDeviceSize offset, VkBuffer countBuffer,
5641 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5642 uint32_t stride) {
5643 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5644 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5645 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005646 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5647 "vkCmdDrawMeshTasksIndirectCountNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005648 if (buffer_state) {
5649 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5650 }
5651 if (count_buffer_state) {
5652 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5653 }
5654}
5655
5656void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
5657 const VkAllocationCallbacks *pAllocator,
5658 VkShaderModule *pShaderModule, VkResult result,
5659 void *csm_state_data) {
5660 if (VK_SUCCESS != result) return;
5661 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
5662
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06005663 spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
locke-lunargd556cc32019-09-17 01:21:23 -06005664 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005665 auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
5666 csm_state->unique_shader_id)
5667 : std::make_shared<SHADER_MODULE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06005668 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
5669}
5670
5671void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005672 PIPELINE_STATE::StageState *stage_state) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005673 // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
5674 auto module = GetShaderModuleState(pStage->module);
5675 if (!module->has_valid_spirv) return;
5676
5677 // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
5678 auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
5679 if (entrypoint == module->end()) return;
5680
locke-lunarg654e3692020-06-04 17:19:15 -06005681 stage_state->stage_flag = pStage->stage;
5682
locke-lunargd556cc32019-09-17 01:21:23 -06005683 // Mark accessible ids
5684 stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
5685 ProcessExecutionModes(module, entrypoint, pipeline);
5686
locke-lunarg63e4daf2020-08-17 17:53:25 -06005687 stage_state->descriptor_uses = CollectInterfaceByDescriptorSlot(
5688 module, stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
locke-lunargd556cc32019-09-17 01:21:23 -06005689 // Capture descriptor uses for the pipeline
locke-lunarg36045992020-08-20 16:54:37 -06005690 for (const auto &use : stage_state->descriptor_uses) {
locke-lunargd556cc32019-09-17 01:21:23 -06005691 // While validating shaders capture which slots are used by the pipeline
John Zulauf649edd52019-10-02 14:39:41 -06005692 const uint32_t slot = use.first.first;
locke-lunarg36045992020-08-20 16:54:37 -06005693 auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
locke-lunargd556cc32019-09-17 01:21:23 -06005694 reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
locke-lunarg25b6c352020-08-06 17:44:18 -06005695 if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
locke-lunarg12d20992020-09-21 12:46:49 -06005696 if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
5697
John Zulauf649edd52019-10-02 14:39:41 -06005698 pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
locke-lunarg36045992020-08-20 16:54:37 -06005699 if (use.second.samplers_used_by_image.size()) {
5700 pipeline->active_slots[slot][use.first.second].samplers_used_by_image[stage_state->stage_flag] =
5701 &use.second.samplers_used_by_image;
5702 }
locke-lunargd556cc32019-09-17 01:21:23 -06005703 }
locke-lunarg78486832020-09-09 19:39:42 -06005704
locke-lunarg96dc9632020-06-10 17:22:18 -06005705 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
5706 pipeline->fragmentShader_writable_output_location_list = CollectWritableOutputLocationinFS(*module, *pStage);
5707 }
locke-lunargd556cc32019-09-17 01:21:23 -06005708}
5709
5710void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
5711 if (cb_state == nullptr) {
5712 return;
5713 }
5714
5715 const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
5716 if (pipeline_layout_state == nullptr) {
5717 return;
5718 }
5719
5720 if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
5721 cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
5722 cb_state->push_constant_data.clear();
5723 uint32_t size_needed = 0;
5724 for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
5725 size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
5726 }
5727 cb_state->push_constant_data.resize(size_needed, 0);
5728 }
5729}
John Zulauf22b0fbe2019-10-15 06:26:16 -06005730
5731void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
5732 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
5733 VkResult result) {
5734 if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
5735 auto swapchain_state = GetSwapchainState(swapchain);
5736
5737 if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);
5738
5739 if (pSwapchainImages) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005740 for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
locke-lunargb3584732019-10-28 20:18:36 -06005741 if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue; // Already retrieved this.
John Zulauf22b0fbe2019-10-15 06:26:16 -06005742
5743 // Add imageMap entries for each swapchain image
5744 VkImageCreateInfo image_ci;
5745 image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -06005746 image_ci.pNext = nullptr; // to be set later
5747 image_ci.flags = 0; // to be updated below
John Zulauf22b0fbe2019-10-15 06:26:16 -06005748 image_ci.imageType = VK_IMAGE_TYPE_2D;
5749 image_ci.format = swapchain_state->createInfo.imageFormat;
5750 image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
5751 image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
5752 image_ci.extent.depth = 1;
5753 image_ci.mipLevels = 1;
5754 image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
5755 image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
5756 image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
5757 image_ci.usage = swapchain_state->createInfo.imageUsage;
5758 image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
5759 image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
5760 image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
5761 image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
5762
5763 image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);
5764
5765 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
5766 image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
5767 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
5768 image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
5769 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
5770 image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);
5771
locke-lunarg296a3c92020-03-25 01:04:29 -06005772 imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005773 auto &image_state = imageMap[pSwapchainImages[i]];
5774 image_state->valid = false;
5775 image_state->create_from_swapchain = swapchain;
5776 image_state->bind_swapchain = swapchain;
5777 image_state->bind_swapchain_imageIndex = i;
Tony-LunarGe64e4fe2020-02-17 16:21:55 -07005778 image_state->is_swapchain_image = true;
locke-lunargb3584732019-10-28 20:18:36 -06005779 swapchain_state->images[i].image = pSwapchainImages[i];
5780 swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
Petr Kraus44f1c482020-04-25 20:09:25 +02005781
5782 AddImageStateProps(*image_state, device, physical_device);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005783 }
5784 }
5785
5786 if (*pSwapchainImageCount) {
John Zulauf22b0fbe2019-10-15 06:26:16 -06005787 swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
5788 }
5789}
sourav parmar35e7a002020-06-09 17:58:44 -07005790
5791void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureKHR(
5792 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
5793 const VkAccelerationStructureBuildOffsetInfoKHR *const *ppOffsetInfos) {
5794 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5795 if (cb_state == nullptr) {
5796 return;
5797 }
5798 for (uint32_t i = 0; i < infoCount; ++i) {
5799 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfos[i].dstAccelerationStructure);
5800 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfos[i].srcAccelerationStructure);
5801 if (dst_as_state != nullptr) {
5802 dst_as_state->built = true;
5803 dst_as_state->build_info_khr.initialize(pInfos);
5804 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
5805 }
5806 if (src_as_state != nullptr) {
5807 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
5808 }
5809 }
5810 cb_state->hasBuildAccelerationStructureCmd = true;
5811}
5812
5813void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
5814 const VkCopyAccelerationStructureInfoKHR *pInfo) {
5815 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5816 if (cb_state) {
5817 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfo->src);
5818 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfo->dst);
5819 if (dst_as_state != nullptr && src_as_state != nullptr) {
5820 dst_as_state->built = true;
5821 dst_as_state->build_info_khr = src_as_state->build_info_khr;
5822 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
5823 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
5824 }
5825 }
5826}
Piers Daniell39842ee2020-07-10 16:42:33 -06005827
5828void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
5829 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5830 cb_state->status |= CBSTATUS_CULL_MODE_SET;
5831 cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
5832}
5833
5834void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
5835 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5836 cb_state->status |= CBSTATUS_FRONT_FACE_SET;
5837 cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
5838}
5839
5840void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
5841 VkPrimitiveTopology primitiveTopology) {
5842 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5843 cb_state->primitiveTopology = primitiveTopology;
5844 cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5845 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5846}
5847
5848void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
5849 const VkViewport *pViewports) {
5850 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5851 cb_state->viewportWithCountMask |= (1u << viewportCount) - 1u;
5852 cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
5853 cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
5854}
5855
5856void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
5857 const VkRect2D *pScissors) {
5858 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5859 cb_state->scissorWithCountMask |= (1u << scissorCount) - 1u;
5860 cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
5861 cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
5862}
5863
5864void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
5865 uint32_t bindingCount, const VkBuffer *pBuffers,
5866 const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
5867 const VkDeviceSize *pStrides) {
5868 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5869 if (pStrides) {
5870 cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
5871 cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
5872 }
5873
5874 uint32_t end = firstBinding + bindingCount;
5875 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
5876 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
5877 }
5878
5879 for (uint32_t i = 0; i < bindingCount; ++i) {
5880 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
5881 vertex_buffer_binding.buffer = pBuffers[i];
5882 vertex_buffer_binding.offset = pOffsets[i];
5883 vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
5884 vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
5885 // Add binding for this vertex buffer to this commandbuffer
5886 if (pBuffers[i]) {
5887 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
5888 }
5889 }
5890}
5891
5892void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
5893 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5894 cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
5895 cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
5896}
5897
5898void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
5899 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5900 cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5901 cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5902}
5903
5904void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
5905 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5906 cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
5907 cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
5908}
5909
5910void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
5911 VkBool32 depthBoundsTestEnable) {
5912 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5913 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5914 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5915}
5916void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
5917 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5918 cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
5919 cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
5920}
5921
5922void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
5923 VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
5924 VkCompareOp compareOp) {
5925 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5926 cb_state->status |= CBSTATUS_STENCIL_OP_SET;
5927 cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
5928}