/* Copyright (c) 2015-2020 The Khronos Group Inc.
 * Copyright (c) 2015-2020 Valve Corporation
 * Copyright (c) 2015-2020 LunarG, Inc.
 * Copyright (C) 2015-2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

const char *CommandTypeString(CMD_TYPE type) {
    // Autogenerated as part of the vk_validation_error_message.h codegen
    static const std::array<const char *, CMD_RANGE_SIZE> command_name_list = {{VUID_CMD_NAME_LIST}};
    return command_name_list[type];
}
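// Note (assumption, not from this file): VUID_CMD_NAME_LIST is generated alongside the CMD_TYPE
// enum, so each enumerant indexes its printable command name (presumably e.g. a CMD_DRAW entry
// yields something like "vkCmdDraw"), and the array stays in sync with CMD_RANGE_SIZE.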

void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}

uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
    // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
    uint32_t mip_level_count = range->levelCount;
    if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
        mip_level_count = mip_levels - range->baseMipLevel;
    }
    return mip_level_count;
}

uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
    // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
    uint32_t array_layer_count = range->layerCount;
    if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
        array_layer_count = layers - range->baseArrayLayer;
    }
    return array_layer_count;
}
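
// Example (illustrative): for an image created with mipLevels = 8, a range of
// { baseMipLevel = 2, levelCount = VK_REMAINING_MIP_LEVELS } resolves to
// mip_level_count = 8 - 2 = 6; ResolveRemainingLayers behaves the same way for array layers.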

VkImageSubresourceRange NormalizeSubresourceRange(const VkImageCreateInfo &image_create_info,
                                                  const VkImageSubresourceRange &range) {
    VkImageSubresourceRange norm = range;
    norm.levelCount = ResolveRemainingLevels(&range, image_create_info.mipLevels);

    // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR flag bit, where <extent.depth> and
    // <arrayLayers> can potentially alias.
    uint32_t layer_limit = (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR))
                               ? image_create_info.extent.depth
                               : image_create_info.arrayLayers;
    norm.layerCount = ResolveRemainingLayers(&range, layer_limit);

    // For multiplanar formats, IMAGE_ASPECT_COLOR is equivalent to adding the aspect of the individual planes
    VkImageAspectFlags &aspect_mask = norm.aspectMask;
    if (FormatIsMultiplane(image_create_info.format)) {
        if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
            aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
            aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
            if (FormatPlaneCount(image_create_info.format) > 2) {
                aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
            }
        }
    }
    return norm;
}
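
// Example (illustrative): normalizing { aspectMask = VK_IMAGE_ASPECT_COLOR_BIT } against a
// 3-plane format such as VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM rewrites the mask to
// PLANE_0 | PLANE_1 | PLANE_2, so downstream per-plane bookkeeping sees explicit plane aspects.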

VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range) {
    const VkImageCreateInfo &image_create_info = image_state.createInfo;
    return NormalizeSubresourceRange(image_create_info, range);
}

// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
// attachments won't persist past the API entry point exit.
std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
                                                                   const FRAMEBUFFER_STATE &fb_state) {
    const VkImageView *attachments = fb_state.createInfo.pAttachments;
    uint32_t count = fb_state.createInfo.attachmentCount;
    if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
        const auto *framebuffer_attachments = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
        if (framebuffer_attachments) {
            attachments = framebuffer_attachments->pAttachments;
            count = framebuffer_attachments->attachmentCount;
        }
    }
    return std::make_pair(count, attachments);
}
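
// Note (sketch of the imageless case): a framebuffer created with VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT
// carries no views of its own; the application supplies them at vkCmdBeginRenderPass time by chaining
// a VkRenderPassAttachmentBeginInfo { attachmentCount, pAttachments } onto VkRenderPassBeginInfo::pNext,
// which is exactly the chain this helper walks.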

std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetAttachmentViews(const VkRenderPassBeginInfo &rp_begin,
                                                                                 const FRAMEBUFFER_STATE &fb_state) const {
    std::vector<const IMAGE_VIEW_STATE *> views;

    const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
    const auto attachment_count = count_attachment.first;
    const auto *attachments = count_attachment.second;
    views.resize(attachment_count, nullptr);
    for (uint32_t i = 0; i < attachment_count; i++) {
        if (attachments[i] != VK_NULL_HANDLE) {
            views[i] = Get<IMAGE_VIEW_STATE>(attachments[i]);
        }
    }
    return views;
}

std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetCurrentAttachmentViews(const CMD_BUFFER_STATE &cb_state) const {
    // Only valid *after* RecordBeginRenderPass and *before* RecordEndRenderPass as it relies on cb_state for the renderpass info.
    std::vector<const IMAGE_VIEW_STATE *> views;

    const auto *rp_state = cb_state.activeRenderPass.get();
    if (!rp_state) return views;
    const auto &rp_begin = *cb_state.activeRenderPassBeginInfo.ptr();
    const auto *fb_state = Get<FRAMEBUFFER_STATE>(rp_begin.framebuffer);
    if (!fb_state) return views;

    return GetAttachmentViews(rp_begin, *fb_state);
}

PIPELINE_STATE *GetCurrentPipelineFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint) {
    const auto last_bound_it = cmd.lastBound.find(pipelineBindPoint);
    if (last_bound_it == cmd.lastBound.cend()) {
        return nullptr;
    }
    return last_bound_it->second.pipeline_state;
}

void GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint,
                                                         const PIPELINE_STATE **rtn_pipe,
                                                         const std::vector<LAST_BOUND_STATE::PER_SET> **rtn_sets) {
    const auto last_bound_it = cmd.lastBound.find(pipelineBindPoint);
    if (last_bound_it == cmd.lastBound.cend()) {
        return;
    }
    *rtn_pipe = last_bound_it->second.pipeline_state;
    *rtn_sets = &(last_bound_it->second.per_set);
}
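
// Note: if pipelineBindPoint has no bound state yet, the two out-parameters above are left
// untouched, so callers are expected to initialize *rtn_pipe / *rtn_sets (e.g., to nullptr)
// before calling.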

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->external_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            is_node->format_features = it->second;
        }
    }
}

void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
    const VkExternalMemoryBufferCreateInfo *embci = lvl_find_in_chain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
    if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        bs_node->external_ahb = true;
    }
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            ycbcr_state->format_features = it->second;
        }
    }
};

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
};

void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
    if (ahb_format_props) {
        ahb_ext_formats_map.insert({ahb_format_props->externalFormat, ahb_format_props->formatFeatures});
    }
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state){};

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};

#endif  // VK_USE_PLATFORM_ANDROID_KHR

std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
                                                                                     uint32_t set) {
    std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
    if (layout_data && (set < layout_data->set_layouts.size())) {
        dsl = layout_data->set_layouts[set];
    }
    return dsl;
}

void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
    // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
    // If the format is an AHB external format, the features were already set at image-creation time
    if (image_state.has_ahb_format == false) {
        const VkImageTiling image_tiling = image_state.createInfo.tiling;
        const VkFormat image_format = image_state.createInfo.format;
        if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
            VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
                VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
            DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image, &drm_format_properties);

            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
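            // Standard Vulkan two-call idiom: the query above ran with pDrmFormatModifierProperties == nullptr
            // and only filled drmFormatModifierCount; the second call below fills the now-sized array.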
            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
            drm_properties.resize(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);

            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
                    drm_format_properties.drmFormatModifier) {
                    image_state.format_features =
                        drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                    break;
                }
            }
        } else {
            VkFormatProperties format_properties;
            DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
            image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                   : format_properties.optimalTilingFeatures;
        }
    }
}

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes every format has at least one plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    AddImageStateProps(*is_node, device, physical_device);

    is_node->unprotected = ((pCreateInfo->flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);

    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}
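
// Example (illustrative): for a disjoint VK_FORMAT_G8_B8R8_2PLANE_420_UNORM image the code above
// records plane0_requirements and plane1_requirements separately, chaining
// VkImagePlaneMemoryRequirementsInfo { planeAspect = VK_IMAGE_ASPECT_PLANE_n_BIT } per query,
// since each plane of a disjoint image is bound to memory independently.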

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image, mem_binding);
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
    }
    // Record the memory requirements in case the app does not query them
    // External AHB memory can't be queried until after memory is bound
    if (buffer_state->external_ahb == false) {
        DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);
    }

    buffer_state->unprotected = ((pCreateInfo->flags & VK_BUFFER_CREATE_PROTECTED_BIT) == 0);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);

    // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
    const VkImageTiling image_tiling = image_state->createInfo.tiling;
    const VkFormat image_view_format = pCreateInfo->format;
    if (image_state->has_ahb_format == true) {
        // The ImageView uses the Image's format features since they share the same AHB
        image_view_state->format_features = image_state->format_features;
    } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
        assert(device_extensions.vk_ext_image_drm_format_modifier);
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                       nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
        // Size the modifier array and query again before reading it below; the first call only
        // filled drmFormatModifierCount (mirrors the two-call pattern in AddImageStateProps)
        std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
        drm_properties.resize(drm_properties_list.drmFormatModifierCount);
        drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
                image_view_state->format_features |=
                    drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                break;
            }
        }
    } else {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
        image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                     : format_properties.optimalTilingFeatures;
    }

    imageViewMap.insert(std::make_pair(*pView, std::move(image_view_state)));
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        RemoveBufferMemoryRange(buffer, mem_binding);
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(CMD_BUFFER_STATE *cb, FRAMEBUFFER_STATE *framebuffer,
                                                                      uint32_t index) {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
        assert(index < cb->imagelessFramebufferAttachments.size());
        return cb->imagelessFramebufferAttachments[index];
    }
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const CMD_BUFFER_STATE *cb,
                                                                            const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
        assert(index < cb->imagelessFramebufferAttachments.size());
        return cb->imagelessFramebufferAttachments[index];
    }
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        if (image_state->binding.mem_state) {
            bound_images = &image_state->binding.mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // This is a one-way clear. Because bound_images contains the cross references, a single pass that
    // clears each image's aliasing set removes every reference; a second, reverse pass is not needed.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}

const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo, fake_address);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }

    // Assumes validation has already ensured there is only a single import operation in the pNext chain
#ifdef VK_USE_PLATFORM_WIN32_KHR
    auto win32_import = lvl_find_in_chain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
    if (win32_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = win32_import->handleType;
    }
#endif
    auto fd_import = lvl_find_in_chain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
    if (fd_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = fd_import->handleType;
    }
    auto host_pointer_import = lvl_find_in_chain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
    if (host_pointer_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = host_pointer_import->handleType;
    }
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    // AHB import doesn't carry a handleType in its pNext struct;
    // it is assumed all imported AHBs can only have the same, single handleType
    auto ahb_import = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
    if ((ahb_import) && (ahb_import->buffer != nullptr)) {
        mem_info->is_import_ahb = true;
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
#endif

    const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    mem_info->unprotected = ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) == 0);
}
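
// Example (illustrative): chaining VkExportMemoryAllocateInfo { .handleTypes =
// VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT } onto VkMemoryAllocateInfo::pNext causes the
// allocation above to be recorded with is_export = true, while an import structure such as
// VkImportMemoryFdInfoKHR flips is_import instead; later binding validation keys off these flags.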

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(mem_binding->cb_bindings,
                                        VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            mem_binding->binding.size = mem_binding->requirements.size;
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}
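
// Note: for non-sparse bindings the tracked size above is taken from the object's full
// VkMemoryRequirements::size (not a caller-supplied size), paired with the caller's memory_offset;
// sparse resources instead go through SetSparseMemBinding below with explicit offset/size pairs.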

// For the NULL mem case, clear any previous binding. Otherwise:
// make sure the given object is in its object map,
// if a previous binding existed, update the binding,
// add a reference from objectInfo to memoryInfo, and
// add a reference off of the object's binding info.
// Returns VK_TRUE if the addition is successful, VK_FALSE otherwise
bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
                                                 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
    bool skip = VK_FALSE;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
            if (binding.mem_state) {
                binding.mem_state->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}

void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[setIndex].validated_set != descriptor_set ||
                state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                              state.per_set[setIndex].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                        state.per_set[setIndex].validated_set_binding_req_map.end(),
                                        std::inserter(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, delta_reqs);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, binding_req_map);
                }

                state.per_set[setIndex].validated_set = descriptor_set;
                state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}
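
// Note (sketch of the caching strategy above): for "many"-descriptor (bindless-style) sets, the
// tracker remembers which set, change count, and binding-requirement map it last processed; on the
// next draw it computes only the set-difference of newly required bindings (std::set_difference)
// instead of re-recording every descriptor, which keeps per-draw cost proportional to what changed.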

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

1048// For given object struct return a ptr of BASE_NODE type for its wrapping struct
1049BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001050 if (object_struct.node) {
1051#ifdef _DEBUG
1052 // assert that lookup would find the same object
1053 VulkanTypedHandle other = object_struct;
1054 other.node = nullptr;
1055 assert(object_struct.node == GetStateStructPtrFromObject(other));
1056#endif
1057 return object_struct.node;
1058 }
locke-lunargd556cc32019-09-17 01:21:23 -06001059 BASE_NODE *base_ptr = nullptr;
1060 switch (object_struct.type) {
1061 case kVulkanObjectTypeDescriptorSet: {
1062 base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
1063 break;
1064 }
1065 case kVulkanObjectTypeSampler: {
1066 base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
1067 break;
1068 }
1069 case kVulkanObjectTypeQueryPool: {
1070 base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
1071 break;
1072 }
1073 case kVulkanObjectTypePipeline: {
1074 base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
1075 break;
1076 }
1077 case kVulkanObjectTypeBuffer: {
1078 base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
1079 break;
1080 }
1081 case kVulkanObjectTypeBufferView: {
1082 base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
1083 break;
1084 }
1085 case kVulkanObjectTypeImage: {
1086 base_ptr = GetImageState(object_struct.Cast<VkImage>());
1087 break;
1088 }
1089 case kVulkanObjectTypeImageView: {
1090 base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
1091 break;
1092 }
1093 case kVulkanObjectTypeEvent: {
1094 base_ptr = GetEventState(object_struct.Cast<VkEvent>());
1095 break;
1096 }
1097 case kVulkanObjectTypeDescriptorPool: {
1098 base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
1099 break;
1100 }
1101 case kVulkanObjectTypeCommandPool: {
1102 base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
1103 break;
1104 }
1105 case kVulkanObjectTypeFramebuffer: {
1106 base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
1107 break;
1108 }
1109 case kVulkanObjectTypeRenderPass: {
1110 base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
1111 break;
1112 }
1113 case kVulkanObjectTypeDeviceMemory: {
1114 base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
1115 break;
1116 }
1117 case kVulkanObjectTypeAccelerationStructureNV: {
1118 base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
1119 break;
1120 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001121 case kVulkanObjectTypeUnknown:
1122 // This can happen if an element of the object_bindings vector has been
            // zeroed out after an object is destroyed.
1124 break;
locke-lunargd556cc32019-09-17 01:21:23 -06001125 default:
1126 // TODO : Any other objects to be handled here?
1127 assert(0);
1128 break;
1129 }
1130 return base_ptr;
1131}
1132
// Gets the union of all features defined by "potential format features" in the spec,
// except it does not handle the external format case for AHB, as that can only be used
// for sampled images
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001135VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
1136 VkFormatFeatureFlags format_features = 0;
1137
1138 if (format != VK_FORMAT_UNDEFINED) {
1139 VkFormatProperties format_properties;
1140 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
1141 format_features |= format_properties.linearTilingFeatures;
1142 format_features |= format_properties.optimalTilingFeatures;
1143 if (device_extensions.vk_ext_image_drm_format_modifier) {
1144 // VK_KHR_get_physical_device_properties2 is required in this case
1145 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
1146 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
1147 nullptr};
1148 format_properties_2.pNext = (void *)&drm_properties_list;
1149 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
1150 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
1151 format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
1152 }
1153 }
1154 }
1155
1156 return format_features;
1157}
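
// Illustrative use (hypothetical caller, not code from this file): a check that wants to
// know whether a format could ever support sampling under any tiling might do:
//   if ((GetPotentialFormatFeatures(fmt) & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) == 0) {
//       // this format can never be sampled on this device
//   }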
1158
// Tie the VulkanTypedHandle to the cmd buffer, which includes:
//  - adding the object_binding to the cmd buffer
//  - adding the cb_binding to the object
Jeff Bolzadbfa852019-10-04 13:53:30 -05001162bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
locke-lunargd556cc32019-09-17 01:21:23 -06001163 const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001164 if (disabled[command_buffer_state]) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001165 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001166 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001167 // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
1168 // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
1169 auto inserted = cb_bindings.insert({cb_node, -1});
1170 if (inserted.second) {
1171 cb_node->object_bindings.push_back(obj);
1172 inserted.first->second = (int)cb_node->object_bindings.size() - 1;
1173 return true;
1174 }
1175 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001176}
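
// The two-way links maintained above let validation walk from a command buffer to every
// object it references (object_bindings) and from an object back to every command buffer
// bound to it (cb_bindings); the stored index lets the object_bindings slot be zeroed in
// O(1) when the object is destroyed.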
1177
// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
1179void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
1180 BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
1181 if (base_obj) base_obj->cb_bindings.erase(cb_node);
1182}
1183
1184// Reset the command buffer state
1185// Maintain the createInfo and set state to CB_NEW, but clear all other state
1186void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
1187 CMD_BUFFER_STATE *pCB = GetCBState(cb);
1188 if (pCB) {
1189 pCB->in_use.store(0);
1190 // Reset CB state (note that createInfo is not cleared)
1191 pCB->commandBuffer = cb;
1192 memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
1193 memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
1194 pCB->hasDrawCmd = false;
1195 pCB->hasTraceRaysCmd = false;
1196 pCB->hasBuildAccelerationStructureCmd = false;
1197 pCB->hasDispatchCmd = false;
1198 pCB->state = CB_NEW;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001199 pCB->commandCount = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06001200 pCB->submitCount = 0;
1201 pCB->image_layout_change_count = 1; // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
1202 pCB->status = 0;
1203 pCB->static_status = 0;
1204 pCB->viewportMask = 0;
Piers Daniell39842ee2020-07-10 16:42:33 -06001205 pCB->viewportWithCountMask = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06001206 pCB->scissorMask = 0;
Piers Daniell39842ee2020-07-10 16:42:33 -06001207 pCB->scissorWithCountMask = 0;
1208 pCB->primitiveTopology = VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;
locke-lunargd556cc32019-09-17 01:21:23 -06001209
1210 for (auto &item : pCB->lastBound) {
1211 item.second.reset();
1212 }
1213
Tony-LunarG61e7c0c2020-03-03 16:09:11 -07001214 pCB->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
locke-lunargd556cc32019-09-17 01:21:23 -06001215 pCB->activeRenderPass = nullptr;
1216 pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
1217 pCB->activeSubpass = 0;
1218 pCB->broken_bindings.clear();
1219 pCB->waitedEvents.clear();
1220 pCB->events.clear();
1221 pCB->writeEventsBeforeWait.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001222 pCB->activeQueries.clear();
1223 pCB->startedQueries.clear();
1224 pCB->image_layout_map.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001225 pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
1226 pCB->vertex_buffer_used = false;
1227 pCB->primaryCommandBuffer = VK_NULL_HANDLE;
1228 // If secondary, invalidate any primary command buffer that may call us.
1229 if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001230 InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
locke-lunargd556cc32019-09-17 01:21:23 -06001231 }
1232
1233 // Remove reverse command buffer links.
1234 for (auto pSubCB : pCB->linkedCommandBuffers) {
1235 pSubCB->linkedCommandBuffers.erase(pCB);
1236 }
1237 pCB->linkedCommandBuffers.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001238 pCB->queue_submit_functions.clear();
1239 pCB->cmd_execute_commands_functions.clear();
1240 pCB->eventUpdates.clear();
1241 pCB->queryUpdates.clear();
1242
1243 // Remove object bindings
1244 for (const auto &obj : pCB->object_bindings) {
1245 RemoveCommandBufferBinding(obj, pCB);
1246 }
1247 pCB->object_bindings.clear();
1248 // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
1249 for (auto framebuffer : pCB->framebuffers) {
locke-lunargaecf2152020-05-12 17:15:41 -06001250 framebuffer->cb_bindings.erase(pCB);
locke-lunargd556cc32019-09-17 01:21:23 -06001251 }
1252 pCB->framebuffers.clear();
1253 pCB->activeFramebuffer = VK_NULL_HANDLE;
1254 memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));
1255
1256 pCB->qfo_transfer_image_barriers.Reset();
1257 pCB->qfo_transfer_buffer_barriers.Reset();
1258
1259 // Clean up the label data
1260 ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
1261 pCB->debug_label.Reset();
locke-gb3ce08f2019-09-30 12:30:56 -06001262 pCB->validate_descriptorsets_in_queuesubmit.clear();
Attilio Provenzano02859b22020-02-27 14:17:28 +00001263
1264 // Best practices info
1265 pCB->small_indexed_draw_call_count = 0;
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06001266
1267 pCB->transform_feedback_active = false;
locke-lunargd556cc32019-09-17 01:21:23 -06001268 }
1269 if (command_buffer_reset_callback) {
1270 (*command_buffer_reset_callback)(cb);
1271 }
1272}
1273
1274void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
1275 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
1276 VkResult result) {
1277 if (VK_SUCCESS != result) return;
1278
1279 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
1280 if (nullptr == enabled_features_found) {
1281 const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
1282 if (features2) {
1283 enabled_features_found = &(features2->features);
1284 }
1285 }
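    // The spec disallows supplying both pEnabledFeatures and a VkPhysicalDeviceFeatures2
    // in the pNext chain, so at most one source is found above.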
1286
1287 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
1288 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
1289 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
1290
1291 if (nullptr == enabled_features_found) {
1292 state_tracker->enabled_features.core = {};
1293 } else {
1294 state_tracker->enabled_features.core = *enabled_features_found;
1295 }
1296
1297 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
1298 // previously set them through an explicit API call.
1299 uint32_t count;
1300 auto pd_state = GetPhysicalDeviceState(gpu);
1301 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
1302 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
1303 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
1304 // Save local link to this device's physical device state
1305 state_tracker->physical_device_state = pd_state;
1306
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001307 const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
1308 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001309 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001310 } else {
sfricke-samsung27c70722020-05-02 08:42:39 -07001311 // Set Extension Feature Aliases to false as there is no struct to check
1312 state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
1313 state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
1314 state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
1315 state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
1316 state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
1317 state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
1318
        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan12Features
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001320
1321 const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
1322 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001323 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1324 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1325 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1326 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001327 }
1328
1329 const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
1330 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001331 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1332 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001333 }
1334
1335 const auto *descriptor_indexing_features =
1336 lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
1337 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001338 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1339 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1340 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1341 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1342 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1343 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1344 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1345 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1346 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1347 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1348 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1349 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1350 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1351 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1352 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1353 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1354 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1355 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1356 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1357 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1358 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1359 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1360 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1361 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1362 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1363 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1364 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1365 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
1366 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
1367 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1368 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1369 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1370 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1371 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1372 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1373 descriptor_indexing_features->descriptorBindingPartiallyBound;
1374 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1375 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1376 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001377 }
1378
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001379 const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001380 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001381 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001382 }
1383
1384 const auto *imageless_framebuffer_features =
1385 lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
1386 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001387 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001388 }
1389
1390 const auto *uniform_buffer_standard_layout_features =
1391 lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
1392 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001393 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1394 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001395 }
1396
1397 const auto *subgroup_extended_types_features =
1398 lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
1399 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001400 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1401 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001402 }
1403
1404 const auto *separate_depth_stencil_layouts_features =
1405 lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
1406 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001407 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1408 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001409 }
1410
1411 const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
1412 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001413 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001414 }
1415
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001416 const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001417 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001418 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001419 }
1420
1421 const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
1422 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001423 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1424 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1425 buffer_device_address->bufferDeviceAddressCaptureReplay;
1426 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1427 buffer_device_address->bufferDeviceAddressMultiDevice;
1428 }
1429 }
1430
1431 const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
1432 if (vulkan_11_features) {
1433 state_tracker->enabled_features.core11 = *vulkan_11_features;
1434 } else {
        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan11Features
1436
1437 const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
1438 if (sixteen_bit_storage_features) {
1439 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1440 sixteen_bit_storage_features->storageBuffer16BitAccess;
1441 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1442 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1443 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1444 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1445 }
1446
1447 const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
1448 if (multiview_features) {
1449 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1450 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1451 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1452 }
1453
1454 const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
1455 if (variable_pointers_features) {
1456 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1457 variable_pointers_features->variablePointersStorageBuffer;
1458 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1459 }
1460
1461 const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
1462 if (protected_memory_features) {
1463 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1464 }
1465
1466 const auto *ycbcr_conversion_features =
1467 lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
1468 if (ycbcr_conversion_features) {
1469 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1470 }
1471
1472 const auto *shader_draw_parameters_features =
1473 lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
1474 if (shader_draw_parameters_features) {
1475 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001476 }
1477 }
1478
locke-lunargd556cc32019-09-17 01:21:23 -06001479 const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
1480 state_tracker->physical_device_count =
1481 device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;
1482
locke-lunargd556cc32019-09-17 01:21:23 -06001483 const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
1484 if (exclusive_scissor_features) {
1485 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1486 }
1487
1488 const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
1489 if (shading_rate_image_features) {
1490 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1491 }
1492
1493 const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
1494 if (mesh_shader_features) {
1495 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1496 }
1497
1498 const auto *inline_uniform_block_features =
1499 lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
1500 if (inline_uniform_block_features) {
1501 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1502 }
1503
1504 const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
1505 if (transform_feedback_features) {
1506 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1507 }
1508
locke-lunargd556cc32019-09-17 01:21:23 -06001509 const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
1510 if (vtx_attrib_div_features) {
1511 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1512 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001513
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001514 const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
1515 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001516 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001517 }
1518
1519 const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
1520 if (cooperative_matrix_features) {
1521 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1522 }
1523
locke-lunargd556cc32019-09-17 01:21:23 -06001524 const auto *compute_shader_derivatives_features =
1525 lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
1526 if (compute_shader_derivatives_features) {
1527 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1528 }
1529
1530 const auto *fragment_shader_barycentric_features =
1531 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
1532 if (fragment_shader_barycentric_features) {
1533 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1534 }
1535
1536 const auto *shader_image_footprint_features =
1537 lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
1538 if (shader_image_footprint_features) {
1539 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1540 }
1541
1542 const auto *fragment_shader_interlock_features =
1543 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
1544 if (fragment_shader_interlock_features) {
1545 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1546 }
1547
1548 const auto *demote_to_helper_invocation_features =
1549 lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
1550 if (demote_to_helper_invocation_features) {
1551 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1552 }
1553
1554 const auto *texel_buffer_alignment_features =
1555 lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
1556 if (texel_buffer_alignment_features) {
1557 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1558 }
1559
locke-lunargd556cc32019-09-17 01:21:23 -06001560 const auto *pipeline_exe_props_features =
1561 lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
1562 if (pipeline_exe_props_features) {
1563 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1564 }
1565
Jeff Bolz82f854d2019-09-17 14:56:47 -05001566 const auto *dedicated_allocation_image_aliasing_features =
1567 lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
1568 if (dedicated_allocation_image_aliasing_features) {
1569 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1570 *dedicated_allocation_image_aliasing_features;
1571 }
1572
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001573 const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
1574 if (performance_query_features) {
1575 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1576 }
1577
Tobias Hector782bcde2019-11-28 16:19:42 +00001578 const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
1579 if (device_coherent_memory_features) {
1580 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1581 }
1582
sfricke-samsungcead0802020-01-30 22:20:10 -08001583 const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
1584 if (ycbcr_image_array_features) {
1585 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1586 }
1587
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001588 const auto *ray_tracing_features = lvl_find_in_chain<VkPhysicalDeviceRayTracingFeaturesKHR>(pCreateInfo->pNext);
1589 if (ray_tracing_features) {
1590 state_tracker->enabled_features.ray_tracing_features = *ray_tracing_features;
1591 }
1592
Jeff Bolz165818a2020-05-08 11:19:03 -05001593 const auto *robustness2_features = lvl_find_in_chain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
1594 if (robustness2_features) {
1595 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1596 }
1597
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001598 const auto *fragment_density_map_features =
1599 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
1600 if (fragment_density_map_features) {
1601 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1602 }
1603
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001604 const auto *fragment_density_map_features2 =
1605 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
1606 if (fragment_density_map_features2) {
1607 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1608 }
1609
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001610 const auto *astc_decode_features = lvl_find_in_chain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
1611 if (astc_decode_features) {
1612 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1613 }
1614
Tony-LunarG7337b312020-04-15 16:40:25 -06001615 const auto *custom_border_color_features = lvl_find_in_chain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
1616 if (custom_border_color_features) {
1617 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1618 }
1619
sfricke-samsungfd661d62020-05-16 00:57:27 -07001620 const auto *pipeline_creation_cache_control_features =
1621 lvl_find_in_chain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
1622 if (pipeline_creation_cache_control_features) {
1623 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1624 }
1625
Piers Daniell39842ee2020-07-10 16:42:33 -06001626 const auto *extended_dynamic_state_features =
1627 lvl_find_in_chain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1628 if (extended_dynamic_state_features) {
1629 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1630 }
1631
    // Store physical device properties and physical device memory limits into the state tracker structs
1633 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1634 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001635 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1636 &state_tracker->phys_dev_props_core11);
1637 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1638 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001639
1640 const auto &dev_ext = state_tracker->device_extensions;
1641 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1642
1643 if (dev_ext.vk_khr_push_descriptor) {
1644 // Get the needed push_descriptor limits
1645 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1646 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1647 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1648 }
1649
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001650 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
1651 VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
1652 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1653 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1654 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1655 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1656 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1657 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1658 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1659 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1660 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1661 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1662 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1663 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1664 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1665 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1666 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1667 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1668 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1669 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1670 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1671 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1672 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1673 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1674 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1675 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1676 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1677 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1678 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1679 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1680 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1681 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1682 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1683 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1684 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1685 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1686 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1687 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1688 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1689 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1690 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1691 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
1692 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
1693 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
1694 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
1695 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
1696 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
1697 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
1698 }
1699
locke-lunargd556cc32019-09-17 01:21:23 -06001700 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1701 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1702 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1703 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001704
1705 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
1706 VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
1707 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
1708 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
1709 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
1710 depth_stencil_resolve_props.supportedStencilResolveModes;
1711 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
1712 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
1713 }
1714
locke-lunargd556cc32019-09-17 01:21:23 -06001715 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001716 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
1717 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing, &phys_dev_props->ray_tracing_propsKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06001718 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1719 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001720 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001721 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07001722 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06001723 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001724
1725 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
1726 VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
1727 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
1728 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
1729 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
1730 }
1731
1732 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
1733 VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
1734 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
1735 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
1736 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
1737 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
1738 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
1739 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
1740 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
1741 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
1742 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
1743 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
1744 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
1745 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
1746 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
1747 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
1748 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
1749 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
1750 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
1751 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
1752 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
1753 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
1754 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
1755 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001756
locke-lunargd556cc32019-09-17 01:21:23 -06001757 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1758 // Get the needed cooperative_matrix properties
1759 auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1760 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
1761 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1762 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1763
1764 uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, nullptr);
1766 state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
1767 lvl_init_struct<VkCooperativeMatrixPropertiesNV>());
1768
1769 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
1770 state_tracker->cooperative_matrix_properties.data());
1771 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001772 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06001773 // Get the needed subgroup limits
1774 auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
1775 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
1776 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1777
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001778 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
1779 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
1780 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
1781 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06001782 }
1783
1784 // Store queue family data
1785 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1786 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001787 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06001788 state_tracker->queue_family_index_map.insert(
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001789 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.queueCount));
1790 state_tracker->queue_family_create_flags_map.insert(
1791 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.flags));
locke-lunargd556cc32019-09-17 01:21:23 -06001792 }
1793 }
1794}
1795
1796void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
1797 if (!device) return;
1798
locke-lunargd556cc32019-09-17 01:21:23 -06001799 // Reset all command buffers before destroying them, to unlink object_bindings.
1800 for (auto &commandBuffer : commandBufferMap) {
1801 ResetCommandBufferState(commandBuffer.first);
1802 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001803 pipelineMap.clear();
1804 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001805 commandBufferMap.clear();
1806
1807 // This will also delete all sets in the pool & remove them from setMap
1808 DeleteDescriptorSetPools();
1809 // All sets should be removed
1810 assert(setMap.empty());
1811 descriptorSetLayoutMap.clear();
1812 imageViewMap.clear();
1813 imageMap.clear();
1814 bufferViewMap.clear();
1815 bufferMap.clear();
1816 // Queues persist until device is destroyed
1817 queueMap.clear();
1818}
1819
1820// Loop through bound objects and increment their in_use counts.
1821void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
1822 for (auto obj : cb_node->object_bindings) {
1823 auto base_obj = GetStateStructPtrFromObject(obj);
1824 if (base_obj) {
1825 base_obj->in_use.fetch_add(1);
1826 }
1827 }
1828}
1829
1830// Track which resources are in-flight by atomically incrementing their "in_use" count
1831void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1832 cb_node->submitCount++;
1833 cb_node->in_use.fetch_add(1);
1834
1835 // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
1836 IncrementBoundObjects(cb_node);
    // TODO: We should be able to remove the null look-up checks from the code below once
    // all the corresponding cases are verified to cause CB_INVALID state, since that state
    // would then be flagged prior to calling this function
1840 for (auto event : cb_node->writeEventsBeforeWait) {
1841 auto event_state = GetEventState(event);
1842 if (event_state) event_state->write_in_use++;
1843 }
1844}
1845
1846// Decrement in-use count for objects bound to command buffer
1847void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
1848 BASE_NODE *base_obj = nullptr;
1849 for (auto obj : cb_node->object_bindings) {
1850 base_obj = GetStateStructPtrFromObject(obj);
1851 if (base_obj) {
1852 base_obj->in_use.fetch_sub(1);
1853 }
1854 }
1855}
1856
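// Roll the queue's sequence number forward to 'seq', one submission at a time, releasing
// the in-use references each retired submission held (semaphores, command buffers, events,
// queries), then retire other queues up to the highest wait observed against them.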
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001857void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
locke-lunargd556cc32019-09-17 01:21:23 -06001858 std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;
1859
1860 // Roll this queue forward, one submission at a time.
1861 while (pQueue->seq < seq) {
1862 auto &submission = pQueue->submissions.front();
1863
1864 for (auto &wait : submission.waitSemaphores) {
1865 auto pSemaphore = GetSemaphoreState(wait.semaphore);
1866 if (pSemaphore) {
1867 pSemaphore->in_use.fetch_sub(1);
1868 }
1869 auto &lastSeq = otherQueueSeqs[wait.queue];
1870 lastSeq = std::max(lastSeq, wait.seq);
1871 }
1872
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001873 for (auto &signal : submission.signalSemaphores) {
1874 auto pSemaphore = GetSemaphoreState(signal.semaphore);
locke-lunargd556cc32019-09-17 01:21:23 -06001875 if (pSemaphore) {
1876 pSemaphore->in_use.fetch_sub(1);
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001877 if (pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && pSemaphore->payload < signal.payload) {
1878 pSemaphore->payload = signal.payload;
1879 }
locke-lunargd556cc32019-09-17 01:21:23 -06001880 }
1881 }
1882
1883 for (auto &semaphore : submission.externalSemaphores) {
1884 auto pSemaphore = GetSemaphoreState(semaphore);
1885 if (pSemaphore) {
1886 pSemaphore->in_use.fetch_sub(1);
1887 }
1888 }
1889
1890 for (auto cb : submission.cbs) {
1891 auto cb_node = GetCBState(cb);
1892 if (!cb_node) {
1893 continue;
1894 }
1895 // First perform decrement on general case bound objects
1896 DecrementBoundResources(cb_node);
1897 for (auto event : cb_node->writeEventsBeforeWait) {
1898 auto eventNode = eventMap.find(event);
1899 if (eventNode != eventMap.end()) {
1900 eventNode->second.write_in_use--;
1901 }
1902 }
Jeff Bolz310775c2019-10-09 00:46:33 -05001903 QueryMap localQueryToStateMap;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001904 VkQueryPool first_pool = VK_NULL_HANDLE;
Jeff Bolz310775c2019-10-09 00:46:33 -05001905 for (auto &function : cb_node->queryUpdates) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02001906 function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &localQueryToStateMap);
Jeff Bolz310775c2019-10-09 00:46:33 -05001907 }
1908
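            // Promote queries this submission ended to AVAILABLE now that the work has retired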
1909 for (auto queryStatePair : localQueryToStateMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001910 if (queryStatePair.second == QUERYSTATE_ENDED) {
1911 queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
1912 }
locke-lunargd556cc32019-09-17 01:21:23 -06001913 }
locke-lunargd556cc32019-09-17 01:21:23 -06001914 cb_node->in_use.fetch_sub(1);
1915 }
1916
1917 auto pFence = GetFenceState(submission.fence);
1918 if (pFence && pFence->scope == kSyncScopeInternal) {
1919 pFence->state = FENCE_RETIRED;
1920 }
1921
1922 pQueue->submissions.pop_front();
1923 pQueue->seq++;
1924 }
1925
1926 // Roll other queues forward to the highest seq we saw a wait for
1927 for (auto qs : otherQueueSeqs) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001928 RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001929 }
1930}
1931
1932// Submit a fence to a queue, delimiting previous fences and previous untracked
1933// work by it.
1934static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1935 pFence->state = FENCE_INFLIGHT;
1936 pFence->signaler.first = pQueue->queue;
1937 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1938}
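
// Worked example for the signaler sequence: with pQueue->seq == 5, two submissions already
// pending, and a vkQueueSubmit carrying three batches, the fence retires at sequence
// 5 + 2 + 3 == 10, i.e. once the queue has rolled past the last batch of this submit.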
1939
1940void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
1941 VkFence fence, VkResult result) {
Mark Lobodzinski09379db2020-05-07 08:22:01 -06001942 if (result != VK_SUCCESS) return;
locke-lunargd556cc32019-09-17 01:21:23 -06001943 uint64_t early_retire_seq = 0;
1944 auto pQueue = GetQueueState(queue);
1945 auto pFence = GetFenceState(fence);
1946
1947 if (pFence) {
1948 if (pFence->scope == kSyncScopeInternal) {
1949 // Mark fence in use
1950 SubmitFence(pQueue, pFence, std::max(1u, submitCount));
1951 if (!submitCount) {
1952 // If no submissions, but just dropping a fence on the end of the queue,
1953 // record an empty submission with just the fence, so we can determine
1954 // its completion.
1955 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
Jakub Okoński04feb3b2020-02-01 18:31:01 +01001956 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
locke-lunargd556cc32019-09-17 01:21:23 -06001957 }
1958 } else {
            // Retire work up until this fence early, since we will not see the wait that corresponds to this signal
1960 early_retire_seq = pQueue->seq + pQueue->submissions.size();
1961 }
1962 }
1963
1964 // Now process each individual submit
1965 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1966 std::vector<VkCommandBuffer> cbs;
1967 const VkSubmitInfo *submit = &pSubmits[submit_idx];
1968 vector<SEMAPHORE_WAIT> semaphore_waits;
Jakub Okoński04feb3b2020-02-01 18:31:01 +01001969 vector<SEMAPHORE_SIGNAL> semaphore_signals;
locke-lunargd556cc32019-09-17 01:21:23 -06001970 vector<VkSemaphore> semaphore_externals;
Jakub Okoński04feb3b2020-02-01 18:31:01 +01001971 const uint64_t next_seq = pQueue->seq + pQueue->submissions.size() + 1;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001972 auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001973 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
1974 VkSemaphore semaphore = submit->pWaitSemaphores[i];
1975 auto pSemaphore = GetSemaphoreState(semaphore);
1976 if (pSemaphore) {
1977 if (pSemaphore->scope == kSyncScopeInternal) {
Juan A. Suarez Romero9cef8852020-03-10 12:19:42 +01001978 SEMAPHORE_WAIT wait;
1979 wait.semaphore = semaphore;
1980 if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
1981 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
1982 wait.queue = pSemaphore->signaler.first;
1983 wait.seq = pSemaphore->signaler.second;
1984 semaphore_waits.push_back(wait);
1985 pSemaphore->in_use.fetch_add(1);
1986 }
1987 pSemaphore->signaler.first = VK_NULL_HANDLE;
1988 pSemaphore->signaled = false;
1989 } else if (pSemaphore->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
1990 wait.queue = queue;
1991 wait.seq = next_seq;
1992 wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
1993 semaphore_waits.push_back(wait);
locke-lunargd556cc32019-09-17 01:21:23 -06001994 pSemaphore->in_use.fetch_add(1);
1995 }
locke-lunargd556cc32019-09-17 01:21:23 -06001996 } else {
1997 semaphore_externals.push_back(semaphore);
1998 pSemaphore->in_use.fetch_add(1);
1999 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
2000 pSemaphore->scope = kSyncScopeInternal;
2001 }
2002 }
2003 }
2004 }
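        // Note the asymmetry above: a binary semaphore wait consumes the prior signal (the
        // signaler is cleared), while a timeline wait only records a dependency when the
        // requested wait value exceeds the semaphore's last known payload.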
2005 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
2006 VkSemaphore semaphore = submit->pSignalSemaphores[i];
2007 auto pSemaphore = GetSemaphoreState(semaphore);
2008 if (pSemaphore) {
2009 if (pSemaphore->scope == kSyncScopeInternal) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002010 SEMAPHORE_SIGNAL signal;
2011 signal.semaphore = semaphore;
2012 signal.seq = next_seq;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002013 if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
2014 pSemaphore->signaler.first = queue;
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002015 pSemaphore->signaler.second = next_seq;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002016 pSemaphore->signaled = true;
2017 } else {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002018 signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002019 }
locke-lunargd556cc32019-09-17 01:21:23 -06002020 pSemaphore->in_use.fetch_add(1);
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002021 semaphore_signals.push_back(signal);
locke-lunargd556cc32019-09-17 01:21:23 -06002022 } else {
2023 // Retire work up until this submit early, we will not see the wait that corresponds to this signal
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002024 early_retire_seq = std::max(early_retire_seq, next_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06002025 }
2026 }
2027 }
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02002028 const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
2029 uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;
2030
locke-lunargd556cc32019-09-17 01:21:23 -06002031 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
2032 auto cb_node = GetCBState(submit->pCommandBuffers[i]);
2033 if (cb_node) {
2034 cbs.push_back(submit->pCommandBuffers[i]);
2035 for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
2036 cbs.push_back(secondaryCmdBuffer->commandBuffer);
2037 IncrementResources(secondaryCmdBuffer);
2038 }
2039 IncrementResources(cb_node);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002040
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02002041 VkQueryPool first_pool = VK_NULL_HANDLE;
2042 EventToStageMap localEventToStageMap;
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002043 QueryMap localQueryToStateMap;
2044 for (auto &function : cb_node->queryUpdates) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02002045 function(nullptr, /*do_validate*/ false, first_pool, perf_pass, &localQueryToStateMap);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002046 }
2047
2048 for (auto queryStatePair : localQueryToStateMap) {
2049 queryToStateMap[queryStatePair.first] = queryStatePair.second;
2050 }
2051
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002052 for (auto &function : cb_node->eventUpdates) {
2053 function(nullptr, /*do_validate*/ false, &localEventToStageMap);
2054 }
2055
2056 for (auto eventStagePair : localEventToStageMap) {
2057 eventMap[eventStagePair.first].stageMask = eventStagePair.second;
2058 }
locke-lunargd556cc32019-09-17 01:21:23 -06002059 }
2060 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002061
locke-lunargd556cc32019-09-17 01:21:23 -06002062 pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02002063 submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, perf_pass);
locke-lunargd556cc32019-09-17 01:21:23 -06002064 }
2065
2066 if (early_retire_seq) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002067 RetireWorkOnQueue(pQueue, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06002068 }
2069}
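
// Editor's summary of the bookkeeping model above (not normative): each queue carries a monotonically
// increasing sequence number, where entry i of pQueue->submissions corresponds to seq = pQueue->seq + i + 1.
// Binary semaphore waits resolve against the (queue, seq) pair recorded by their signaler, while timeline
// waits carry the payload value instead. When a signal can never be observed by this layer (external fence
// or semaphore scope), the queue is retired eagerly via RetireWorkOnQueue rather than waiting for proof.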

void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS == result) {
        AddMemObjInfo(device, *pMemory, pAllocateInfo);
    }
}

void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    fake_memory.Free(mem_info->fake_base_address);
    memObjMap.erase(mem);
}
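
// Editor's note: freeing a VkDeviceMemory while resources are still bound to it is not itself an error
// at the API level, which is why the loop above only detaches tracker state (sparse bindings that
// reference this allocation, bound-memory sets) instead of flagging anything; command buffers that
// recorded against the allocation are marked invalid rather than rejected outright.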

void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);

                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = pSemaphore->signaler.second;
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
    semaphore_state->signaler.first = VK_NULL_HANDLE;
    semaphore_state->signaler.second = 0;
    semaphore_state->signaled = false;
    semaphore_state->scope = kSyncScopeInternal;
    semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
    semaphore_state->payload = 0;
    auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
    if (semaphore_type_create_info) {
        semaphore_state->type = semaphore_type_create_info->semaphoreType;
        semaphore_state->payload = semaphore_type_create_info->initialValue;
    }
    semaphoreMap[*pSemaphore] = std::move(semaphore_state);
}
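
// Illustrative application-side usage (editor's sketch, not part of the tracker): the timeline branch
// above is taken when the app chains VkSemaphoreTypeCreateInfoKHR, e.g.:
//     VkSemaphoreTypeCreateInfoKHR type_info = {VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR};
//     type_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
//     type_info.initialValue = 0;  // becomes semaphore_state->payload
//     VkSemaphoreCreateInfo create_info = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &type_info, 0};
//     vkCreateSemaphore(device, &create_info, nullptr, &semaphore);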

void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
                                                        VkSemaphoreImportFlagsKHR flags) {
    SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
    if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
            sema_node->scope == kSyncScopeInternal) {
            sema_node->scope = kSyncScopeExternalTemporary;
        } else {
            sema_node->scope = kSyncScopeExternalPermanent;
        }
    }
}
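
// Editor's note: sync-fd imports are treated as temporary regardless of the flags argument because
// VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT has copy (one-shot) payload transference in the Vulkan
// spec. A temporary import reverts to internal scope once the imported payload is consumed by a wait;
// see the kSyncScopeExternalTemporary handling in the submit paths above.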

void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
                                                              VkResult result) {
    if (VK_SUCCESS != result) return;
    auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
    if (pSemaphore) {
        pSemaphore->payload = pSignalInfo->value;
    }
}

void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range.offset = offset;
        mem_info->mapped_range.size = size;
        mem_info->p_driver_data = *ppData;
    }
}

void ValidationStateTracker::RetireFence(VkFence fence) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        if (pFence->signaler.first != VK_NULL_HANDLE) {
            // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
            RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
        } else {
            // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
            // the fence as retired.
            pFence->state = FENCE_RETIRED;
        }
    }
}

void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                         VkBool32 waitAll, uint64_t timeout, VkResult result) {
    if (VK_SUCCESS != result) return;

    // When we know that all fences are complete we can clean/remove their CBs
    if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
        for (uint32_t i = 0; i < fenceCount; i++) {
            RetireFence(pFences[i]);
        }
    }
    // NOTE: The alternate case, where only some of the fences have completed, is not handled here. For the app
    // to find out which fences completed it will have to call vkGetFenceStatus(), at which point we'll
    // clean/remove their CBs if complete.
}

void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore) {
        for (auto &pair : queueMap) {
            QUEUE_STATE &queueState = pair.second;
            uint64_t max_seq = 0;
            for (const auto &submission : queueState.submissions) {
                for (const auto &signalSemaphore : submission.signalSemaphores) {
                    if (signalSemaphore.semaphore == semaphore && signalSemaphore.payload <= until_payload) {
                        if (signalSemaphore.seq > max_seq) {
                            max_seq = signalSemaphore.seq;
                        }
                    }
                }
            }
            if (max_seq) {
                RetireWorkOnQueue(&queueState, max_seq);
            }
        }
    }
}
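
// Editor's note: unlike a binary semaphore, a timeline semaphore does not record a single (queue, seq)
// signaler, since any queue may signal any payload value. The scan above therefore walks every queue's
// pending submissions, finds the highest sequence number whose recorded signal payload is <= until_payload,
// and retires each queue up to that point independently.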

void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                  VkResult result) {
    if (VK_SUCCESS != result) return;

    for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
        RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
    }
}

void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                          VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}

void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
                                                             uint64_t timeout, VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}

void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
    if (VK_SUCCESS != result) return;
    RetireFence(fence);
}

void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
    // Add queue to tracking set only if it is new
    auto queue_is_new = queues.emplace(queue);
    if (queue_is_new.second) {
        QUEUE_STATE *queue_state = &queueMap[queue];
        queue_state->queue = queue;
        queue_state->queueFamilyIndex = queue_family_index;
        queue_state->seq = 0;
    }
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
    if (VK_SUCCESS != result) return;
    QUEUE_STATE *queue_state = GetQueueState(queue);
    RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
}

void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (auto &queue : queueMap) {
        RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
    }
}

void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    if (!fence) return;
    auto fence_state = GetFenceState(fence);
    fence_state->destroyed = true;
    fenceMap.erase(fence);
}

void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!semaphore) return;
    auto semaphore_state = GetSemaphoreState(semaphore);
    semaphore_state->destroyed = true;
    semaphoreMap.erase(semaphore);
}

void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    if (!event) return;
    EVENT_STATE *event_state = GetEventState(event);
    const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
    InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
    eventMap.erase(event);
}

void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!queryPool) return;
    QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
    const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
    InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
    qp_state->destroyed = true;
    queryPoolMap.erase(queryPool);
}

// Object with given handle is being bound to memory w/ given mem_info struct.
// Track the binding by adding the handle to the memory object's set of bound objects.
void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
                                               VkDeviceSize memoryOffset) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
}

void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
}

void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
                                                                    VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
}
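
// Editor's note: despite the "range" naming, these helpers no longer track offsets -- the
// memoryOffset/mem_offset parameters are accepted but unused, and only set membership is recorded.
// Overlap and aliasing analysis appears to be handled elsewhere (e.g. RemoveAliasingImages on free).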

// This function removes the handle from the appropriate bound-object set in the memory object's state.
static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
}

void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
}

void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
}

void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        // Track bound memory range information
        auto mem_info = GetDevMemState(mem);
        if (mem_info) {
            InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
        }
        // Track objects tied to memory
        SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
                                                             const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                                const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}
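
// Illustrative application-side usage (editor's sketch): the *2/*2KHR hooks above record batched binds
// of the form:
//     VkBindBufferMemoryInfo infos[2] = {};
//     infos[0].sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO;
//     infos[0].buffer = buffer_a;                // hypothetical handles for illustration
//     infos[0].memory = memory;
//     infos[0].memoryOffset = 0;
//     infos[1] = infos[0];
//     infos[1].buffer = buffer_b;
//     infos[1].memoryOffset = 4096;              // hypothetical offset honoring alignment requirements
//     vkBindBufferMemory2(device, 2, infos);
// Note that, unlike the single-bind path, the batched hooks record per-element without checking result.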

void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        buffer_state->memory_requirements_checked = true;
    }
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                        VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                           VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}

void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
    const VkImagePlaneMemoryRequirementsInfo *plane_info =
        (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
    IMAGE_STATE *image_state = GetImageState(image);
    if (image_state) {
        if (plane_info != nullptr) {
            // Multi-plane image
            image_state->memory_requirements_checked = false;  // Each image plane needs to be checked itself
            if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
                image_state->plane0_memory_requirements_checked = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
                image_state->plane1_memory_requirements_checked = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
                image_state->plane2_memory_requirements_checked = true;
            }
        } else {
            // Single plane image
            image_state->memory_requirements_checked = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(image, nullptr);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}
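
// Illustrative application-side usage (editor's sketch): for a multi-planar format such as
// VK_FORMAT_G8_B8R8_2PLANE_420_UNORM created with disjoint planes, the per-plane branch above is
// reached once per plane aspect:
//     VkImagePlaneMemoryRequirementsInfo plane_info = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
//     plane_info.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
//     VkImageMemoryRequirementsInfo2 info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, &plane_info};
//     info.image = image;
//     VkMemoryRequirements2 reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
//     vkGetImageMemoryRequirements2(device, &info, &reqs);  // repeat for PLANE_1 / PLANE_2 aspects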

static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
                                                        VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
    image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
    if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
        image_state->sparse_metadata_required = true;
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
    VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    auto image_state = GetImageState(image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!shaderModule) return;
    auto shader_module_state = GetShaderModuleState(shaderModule);
    shader_module_state->destroyed = true;
    shaderModuleMap.erase(shaderModule);
}

void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
                                                          const VkAllocationCallbacks *pAllocator) {
    if (!pipeline) return;
    PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
    const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
    pipeline_state->destroyed = true;
    pipelineMap.erase(pipeline);
}

void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!pipelineLayout) return;
    auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
    pipeline_layout_state->destroyed = true;
    pipelineLayoutMap.erase(pipelineLayout);
}

void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
                                                         const VkAllocationCallbacks *pAllocator) {
    if (!sampler) return;
    SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
    const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
    // Any bound cmd buffers are now invalid
    if (sampler_state) {
        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);

        if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
            sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
            custom_border_color_sampler_count--;
        }

        sampler_state->destroyed = true;
    }
    samplerMap.erase(sampler);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
                                                                     const VkAllocationCallbacks *pAllocator) {
    if (!descriptorSetLayout) return;
    auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
    if (layout_it != descriptorSetLayoutMap.end()) {
        layout_it->second.get()->destroyed = true;
        descriptorSetLayoutMap.erase(layout_it);
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!descriptorPool) return;
    DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
    const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
    if (desc_pool_state) {
        // Any bound cmd buffers are now invalid
        InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
        // Free sets that were in this pool
        for (auto ds : desc_pool_state->sets) {
            FreeDescriptorSet(ds);
        }
        desc_pool_state->destroyed = true;
        descriptorPoolMap.erase(descriptorPool);
    }
}

// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        // Allow any derived class to clean up command buffer state
        if (command_buffer_free_callback) {
            (*command_buffer_free_callback)(command_buffers[i]);
        }

        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}

void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
                                                             uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
    auto pPool = GetCommandPoolState(commandPool);
    FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
}

void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
    cmd_pool_state->createFlags = pCreateInfo->flags;
    cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
    cmd_pool_state->unprotected = ((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) == 0);
    commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
}

void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
    query_pool_state->createInfo = *pCreateInfo;
    query_pool_state->pool = *pQueryPool;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];

        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
            const auto &counter = counters.counters[perf->pCounterIndices[i]];
            switch (counter.scope) {
                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
                    query_pool_state->has_perf_scope_command_buffer = true;
                    break;
                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
                    query_pool_state->has_perf_scope_render_pass = true;
                    break;
                default:
                    break;
            }
        }

        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
                                                                      &query_pool_state->n_performance_passes);
    }

    queryPoolMap[*pQueryPool] = std::move(query_pool_state);

    QueryObject query_obj{*pQueryPool, 0u};
    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
        query_obj.query = i;
        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
    }
}
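
// Editor's note: the n_performance_passes value recorded here pairs with the VkPerformanceQuerySubmitInfoKHR
// handling in PostCallRecordQueueSubmit above. A performance query pool whose counters cannot all be
// captured in a single pass requires the application to resubmit the same workload once per pass, tagging
// each submission with counterPassIndex; that index is the perf_pass value threaded through the submission
// bookkeeping and into the per-command-buffer queryUpdates callbacks.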

void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!commandPool) return;
    COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
    // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
    // "When a pool is destroyed, all command buffers allocated from the pool are freed."
    if (cp_state) {
        // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
        std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
        FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
        cp_state->destroyed = true;
        commandPoolMap.erase(commandPool);
    }
}

void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
                                                            VkCommandPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    // Reset all of the CBs allocated from this pool
    auto command_pool_state = GetCommandPoolState(commandPool);
    for (auto cmdBuffer : command_pool_state->commandBuffers) {
        ResetCommandBufferState(cmdBuffer);
    }
}

void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                       VkResult result) {
    for (uint32_t i = 0; i < fenceCount; ++i) {
        auto pFence = GetFenceState(pFences[i]);
        if (pFence) {
            if (pFence->scope == kSyncScopeInternal) {
                pFence->state = FENCE_UNSIGNALED;
            } else if (pFence->scope == kSyncScopeExternalTemporary) {
                pFence->scope = kSyncScopeInternal;
            }
        }
    }
}

// For given cb_nodes, invalidate them and track object causing invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
// can also unlink objects from command buffers.
void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // If secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        cb_nodes.clear();
    }
}

void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
                                                            const VulkanTypedHandle &obj) {
    for (auto cb_node : cb_nodes) {
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // If secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
    }
}
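
// Editor's note: the two invalid states above distinguish *when* the breakage happened --
// CB_INVALID_INCOMPLETE means a bound object was destroyed while the command buffer was still
// recording, CB_INVALID_COMPLETE means it was destroyed after recording ended. Either way the
// destroyed handle is appended to broken_bindings, so later validation can report exactly which
// object invalidated the command buffer.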

void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!framebuffer) return;
    FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
    const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
    InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
    framebuffer_state->destroyed = true;
    frameBufferMap.erase(framebuffer);
}

void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!renderPass) return;
    RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
    const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
    InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
    rp_state->destroyed = true;
    renderPassMap.erase(renderPass);
}

void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto fence_state = std::make_shared<FENCE_STATE>();
    fence_state->fence = *pFence;
    fence_state->createInfo = *pCreateInfo;
    fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
    fenceMap[*pFence] = std::move(fence_state);
}

bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                    const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                    void *cgpl_state_data) const {
    // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    cgpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    cgpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
        (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   VkResult result, void *cgpl_state_data) {
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
        }
    }
    cgpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkComputePipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   void *ccpl_state_data) const {
    auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
    ccpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    ccpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
        ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                  const VkComputePipelineCreateInfo *pCreateInfos,
                                                                  const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                  VkResult result, void *ccpl_state_data) {
    create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);

    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
        }
    }
    ccpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
                                                                        uint32_t count,
                                                                        const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache,
                                                                         uint32_t count,
                                                                         const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
                                                         const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
    if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT || pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
        custom_border_color_sampler_count++;
    }
}

void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
                                                                     const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkDescriptorSetLayout *pSetLayout, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
}

// For repeatable sorting, not very useful for "memory in range" search
struct PushConstantRangeCompare {
    bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
        if (lhs->offset == rhs->offset) {
            if (lhs->size == rhs->size) {
                // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
                return lhs->stageFlags < rhs->stageFlags;
            }
            // If the offsets are the same then sorting by the end of range is useful for validation
            return lhs->size < rhs->size;
        }
        return lhs->offset < rhs->offset;
    }
};

static PushConstantRangesDict push_constant_ranges_dict;

PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
    if (!info->pPushConstantRanges) {
        // Hand back the empty entry (creating as needed)...
        return push_constant_ranges_dict.look_up(PushConstantRanges());
    }

    // Sort the input ranges to ensure equivalent ranges map to the same id
    std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
    for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
        sorted.insert(info->pPushConstantRanges + i);
    }

    PushConstantRanges ranges;
    ranges.reserve(sorted.size());
    for (const auto range : sorted) {
        ranges.emplace_back(*range);
    }
    return push_constant_ranges_dict.look_up(std::move(ranges));
}

// Dictionary of canonical form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;

static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}
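
// Editor's note: this canonicalization scheme is a form of hash-consing -- structurally equal
// push-constant range lists, set-layout lists, and "compatible for set" records are deduplicated
// through the dictionaries above into shared IDs, so pipeline layout compatibility checks elsewhere
// can reduce to cheap ID comparisons instead of deep structural comparisons.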
3068
3069void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
3070 const VkAllocationCallbacks *pAllocator,
3071 VkPipelineLayout *pPipelineLayout, VkResult result) {
3072 if (VK_SUCCESS != result) return;
3073
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003074 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003075 pipeline_layout_state->layout = *pPipelineLayout;
3076 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
3077 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
3078 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05003079 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003080 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
3081 }
3082
3083 // Get canonical form IDs for the "compatible for set" contents
3084 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
3085 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
3086 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
3087
3088    // Create table of "compatible for set N" canonical forms for trivial-accept validation
3089 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
3090 pipeline_layout_state->compat_for_set.emplace_back(
3091 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
3092 }
3093 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
3094}
3095
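// Example (illustrative sketch only; layout_a and layout_b are hypothetical
// PIPELINE_LAYOUT_STATE pointers): the canonical compat_for_set ids built above
// reduce a "compatible for set n" check, which the spec defines over the push
// constant ranges plus set layouts 0..n, to a single id comparison:
//
//     bool compatible_for_set_n = (layout_a->compat_for_set[n] == layout_b->compat_for_set[n]);
//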
3096void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
3097 const VkAllocationCallbacks *pAllocator,
3098 VkDescriptorPool *pDescriptorPool, VkResult result) {
3099 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003100 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003101}
3102
3103void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3104 VkDescriptorPoolResetFlags flags, VkResult result) {
3105 if (VK_SUCCESS != result) return;
3106 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
3107 // TODO: validate flags
3108    // For every set allocated from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
3109 for (auto ds : pPool->sets) {
3110 FreeDescriptorSet(ds);
3111 }
3112 pPool->sets.clear();
3113 // Reset available count for each type and available sets for this pool
3114 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
3115 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
3116 }
3117 pPool->availableSets = pPool->maxSets;
3118}
3119
3120bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
3121 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003122 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003123 // Always update common data
3124 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3125 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3126 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
3127
3128 return false;
3129}
3130
3131// Allocation state was good and the call down the chain was made, so update state based on the allocated descriptor sets
3132void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3133 VkDescriptorSet *pDescriptorSets, VkResult result,
3134 void *ads_state_data) {
3135 if (VK_SUCCESS != result) return;
3136 // All the updates are contained in a single cvdescriptorset function
3137 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3138 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3139 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
3140}
3141
3142void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
3143 const VkDescriptorSet *pDescriptorSets) {
3144 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
3145 // Update available descriptor sets in pool
3146 pool_state->availableSets += count;
3147
3148    // For each freed descriptor set, add its resources back into the pool as available and remove it from the pool and setMap
3149 for (uint32_t i = 0; i < count; ++i) {
3150 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
3151 auto descriptor_set = setMap[pDescriptorSets[i]].get();
3152 uint32_t type_index = 0, descriptor_count = 0;
3153 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
3154 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
3155 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
3156 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
3157 }
3158 FreeDescriptorSet(descriptor_set);
3159 pool_state->sets.erase(descriptor_set);
3160 }
3161 }
3162}
3163
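// Example (illustrative sketch only): for a hypothetical freed set holding three
// UNIFORM_BUFFER descriptors and one COMBINED_IMAGE_SAMPLER descriptor, the loop
// above amounts to:
//
//     pool_state->availableDescriptorTypeCount[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] += 3;
//     pool_state->availableDescriptorTypeCount[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] += 1;
//
// while availableSets was already incremented by the full set count up front.
//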
3164void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
3165 const VkWriteDescriptorSet *pDescriptorWrites,
3166 uint32_t descriptorCopyCount,
3167 const VkCopyDescriptorSet *pDescriptorCopies) {
3168 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
3169 pDescriptorCopies);
3170}
3171
3172void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
3173 VkCommandBuffer *pCommandBuffer, VkResult result) {
3174 if (VK_SUCCESS != result) return;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003175 auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
locke-lunargd556cc32019-09-17 01:21:23 -06003176 if (pPool) {
3177 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
3178 // Add command buffer to its commandPool map
3179 pPool->commandBuffers.insert(pCommandBuffer[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003180 auto pCB = std::make_shared<CMD_BUFFER_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003181 pCB->createInfo = *pCreateInfo;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003182 pCB->command_pool = pPool;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07003183 pCB->unprotected = pPool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06003184 // Add command buffer to map
3185 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
3186 ResetCommandBufferState(pCommandBuffer[i]);
3187 }
3188 }
3189}
3190
3191// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
3192void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003193 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003194 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07003195 // If imageless fb, skip fb binding
3196 if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003197 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
3198 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003199 auto view_state = GetAttachmentImageViewState(cb_state, fb_state, attachment);
locke-lunargd556cc32019-09-17 01:21:23 -06003200 if (view_state) {
3201 AddCommandBufferBindingImageView(cb_state, view_state);
3202 }
3203 }
3204}
3205
3206void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
3207 const VkCommandBufferBeginInfo *pBeginInfo) {
3208 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3209 if (!cb_state) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003210 if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
3211 // Secondary Command Buffer
3212 const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
3213 if (pInfo) {
3214 if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
3215 assert(pInfo->renderPass);
3216 auto framebuffer = GetFramebufferState(pInfo->framebuffer);
3217 if (framebuffer) {
3218 // Connect this framebuffer and its children to this cmdBuffer
3219 AddFramebufferBinding(cb_state, framebuffer);
3220 }
3221 }
3222 }
3223 }
3224 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
3225 ResetCommandBufferState(commandBuffer);
3226 }
3227 // Set updated state here in case implicit reset occurs above
3228 cb_state->state = CB_RECORDING;
3229 cb_state->beginInfo = *pBeginInfo;
3230 if (cb_state->beginInfo.pInheritanceInfo) {
3231 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
3232 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
3233        // If this is a secondary command buffer that is inheriting state, update the items it should inherit.
3234 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
3235 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
locke-lunargaecf2152020-05-12 17:15:41 -06003236 cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
locke-lunargd556cc32019-09-17 01:21:23 -06003237 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
locke-lunargaecf2152020-05-12 17:15:41 -06003238 if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
3239 cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
3240 if (cb_state->activeFramebuffer) cb_state->framebuffers.insert(cb_state->activeFramebuffer);
3241 }
locke-lunargd556cc32019-09-17 01:21:23 -06003242 }
3243 }
3244
3245 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
3246 if (chained_device_group_struct) {
3247 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
3248 } else {
3249 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
3250 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003251
3252 cb_state->performance_lock_acquired = performance_lock_acquired;
locke-lunargd556cc32019-09-17 01:21:23 -06003253}
3254
3255void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
3256 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3257 if (!cb_state) return;
3258    // Cached validation applies only to a particular recording of a particular command buffer.
3259 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
3260 descriptor_set->ClearCachedValidation(cb_state);
3261 }
3262 cb_state->validated_descriptor_sets.clear();
3263 if (VK_SUCCESS == result) {
3264 cb_state->state = CB_RECORDED;
3265 }
3266}
3267
3268void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
3269 VkResult result) {
3270 if (VK_SUCCESS == result) {
3271 ResetCommandBufferState(commandBuffer);
3272 }
3273}
3274
3275CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
3276    // Initially assume everything is static state (baked into the pipeline)
3277 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
3278
3279 if (ds) {
3280 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
3281 switch (ds->pDynamicStates[i]) {
3282 case VK_DYNAMIC_STATE_LINE_WIDTH:
3283 flags &= ~CBSTATUS_LINE_WIDTH_SET;
3284 break;
3285 case VK_DYNAMIC_STATE_DEPTH_BIAS:
3286 flags &= ~CBSTATUS_DEPTH_BIAS_SET;
3287 break;
3288 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
3289 flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
3290 break;
3291 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
3292 flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
3293 break;
3294 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
3295 flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
3296 break;
3297 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
3298 flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
3299 break;
3300 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
3301 flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
3302 break;
3303 case VK_DYNAMIC_STATE_SCISSOR:
3304 flags &= ~CBSTATUS_SCISSOR_SET;
3305 break;
3306 case VK_DYNAMIC_STATE_VIEWPORT:
3307 flags &= ~CBSTATUS_VIEWPORT_SET;
3308 break;
3309 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
3310 flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
3311 break;
3312 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
3313 flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
3314 break;
3315 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
3316 flags &= ~CBSTATUS_LINE_STIPPLE_SET;
3317 break;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003318 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
3319 flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
3320 break;
Piers Daniell39842ee2020-07-10 16:42:33 -06003321 case VK_DYNAMIC_STATE_CULL_MODE_EXT:
3322 flags &= ~CBSTATUS_CULL_MODE_SET;
3323 break;
3324 case VK_DYNAMIC_STATE_FRONT_FACE_EXT:
3325 flags &= ~CBSTATUS_FRONT_FACE_SET;
3326 break;
3327 case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT:
3328 flags &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
3329 break;
3330 case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
3331 flags &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
3332 break;
3333 case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT:
3334 flags &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
3335 break;
3336 case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT:
3337 flags &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
3338 break;
3339 case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT:
3340 flags &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
3341 break;
3342 case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT:
3343 flags &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
3344 break;
3345 case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT:
3346 flags &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
3347 break;
3348 case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT:
3349 flags &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
3350 break;
3351 case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT:
3352 flags &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
3353 break;
3354 case VK_DYNAMIC_STATE_STENCIL_OP_EXT:
3355 flags &= ~CBSTATUS_STENCIL_OP_SET;
3356 break;
locke-lunargd556cc32019-09-17 01:21:23 -06003357 default:
3358 break;
3359 }
3360 }
3361 }
3362
3363 return flags;
3364}
3365
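// Example (illustrative sketch only): a pipeline that declares viewport and scissor
// as dynamic yields a mask with exactly those bits cleared, so all other state is
// treated as statically baked into the pipeline:
//
//     const VkDynamicState dynamic_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
//     VkPipelineDynamicStateCreateInfo ds = {};
//     ds.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
//     ds.dynamicStateCount = 2;
//     ds.pDynamicStates = dynamic_states;
//     CBStatusFlags mask = MakeStaticStateMask(&ds);
//     // mask == (CBSTATUS_ALL_STATE_SET & ~(CBSTATUS_VIEWPORT_SET | CBSTATUS_SCISSOR_SET))
//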
3366// Validation cache:
3367// CV is the bottommost implementor of this extension. Don't pass calls down.
3368// Utility function to set collective state for a pipeline
3369void SetPipelineState(PIPELINE_STATE *pPipe) {
3370 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
3371 if (pPipe->graphicsPipelineCI.pColorBlendState) {
3372 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
3373 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
3374 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3375 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3376 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3377 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3378 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3379 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3380 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3381 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
3382 pPipe->blendConstantsEnabled = true;
3383 }
3384 }
3385 }
3386 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07003387 // Check if sample location is enabled
3388 if (pPipe->graphicsPipelineCI.pMultisampleState) {
3389 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
3390 lvl_find_in_chain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
3391 if (sample_location_state != nullptr) {
3392 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
3393 }
3394 }
locke-lunargd556cc32019-09-17 01:21:23 -06003395}
3396
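// Example (illustrative sketch only): blendConstantsEnabled is latched when any
// enabled attachment uses a factor in the constant-color family, e.g. for a
// hypothetical attachment state:
//
//     VkPipelineColorBlendAttachmentState att = {};
//     att.blendEnable = VK_TRUE;
//     att.srcColorBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
//     // SetPipelineState() would set pPipe->blendConstantsEnabled = true, letting draw-time
//     // checks require that blend constants were supplied (statically or via vkCmdSetBlendConstants).
//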
3397void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
3398 VkPipeline pipeline) {
3399 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3400 assert(cb_state);
3401
3402 auto pipe_state = GetPipelineState(pipeline);
3403 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
3404 cb_state->status &= ~cb_state->static_status;
3405 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
3406 cb_state->status |= cb_state->static_status;
3407 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003408 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003409 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
3410 SetPipelineState(pipe_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003411 AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003412}
3413
3414void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3415 uint32_t viewportCount, const VkViewport *pViewports) {
3416 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3417 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
3418 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003419 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003420}
3421
3422void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3423 uint32_t exclusiveScissorCount,
3424 const VkRect2D *pExclusiveScissors) {
3425 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3426 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3427 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3428 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003429 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003430}
3431
3432void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3433 VkImageLayout imageLayout) {
3434 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3435
3436 if (imageView != VK_NULL_HANDLE) {
3437 auto view_state = GetImageViewState(imageView);
3438 AddCommandBufferBindingImageView(cb_state, view_state);
3439 }
3440}
3441
3442void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3443 uint32_t viewportCount,
3444 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3445 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3446 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3447 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3448 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003449 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003450}
3451
3452void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3453 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3454 const VkAllocationCallbacks *pAllocator,
3455 VkAccelerationStructureNV *pAccelerationStructure,
3456 VkResult result) {
3457 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003458 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003459
3460    // Query the requirements up front in case the application doesn't, to avoid querying at bind/validation time
3461 VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
3462 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3463 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
3464 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3465 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3466
3467 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
3468 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3469 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
3470 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3471 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3472 &as_state->build_scratch_memory_requirements);
3473
3474 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
3475 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3476 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
3477 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3478 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3479 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003480 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003481 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3482}
3483
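// Example (illustrative sketch only): the three queries above mirror what an
// application would otherwise issue before binding memory, e.g. for the object
// itself (as is a hypothetical acceleration structure handle):
//
//     VkAccelerationStructureMemoryRequirementsInfoNV info = {};
//     info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
//     info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
//     info.accelerationStructure = as;
//     VkMemoryRequirements2KHR reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
//     vkGetAccelerationStructureMemoryRequirementsNV(device, &info, &reqs);
//
// Caching all three results at create time lets bind-time validation proceed even
// when the application never queried them itself.
//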
Jeff Bolz95176d02020-04-01 00:36:16 -05003484void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3485 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3486 const VkAllocationCallbacks *pAllocator,
3487 VkAccelerationStructureKHR *pAccelerationStructure,
3488 VkResult result) {
3489 if (VK_SUCCESS != result) return;
3490 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
3491
3492    // Query the requirements up front in case the application doesn't, to avoid querying at bind/validation time
3493 VkAccelerationStructureMemoryRequirementsInfoKHR as_memory_requirements_info = {};
3494 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3495 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR;
3496 as_memory_requirements_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3497 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3498 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &as_memory_requirements_info, &as_state->memory_requirements);
3499
3500 VkAccelerationStructureMemoryRequirementsInfoKHR scratch_memory_req_info = {};
3501 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3502 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR;
3503 scratch_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3504 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3505 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &scratch_memory_req_info,
3506 &as_state->build_scratch_memory_requirements);
3507
3508 VkAccelerationStructureMemoryRequirementsInfoKHR update_memory_req_info = {};
3509 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3510 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR;
3511 update_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3512 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3513 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &update_memory_req_info,
3514 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003515 as_state->allocator = pAllocator;
Jeff Bolz95176d02020-04-01 00:36:16 -05003516 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3517}
3518
locke-lunargd556cc32019-09-17 01:21:23 -06003519void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
3520 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
3521 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
3522 if (as_state != nullptr) {
3523 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3524 as_state->memory_requirements = *pMemoryRequirements;
3525 as_state->memory_requirements_checked = true;
3526 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3527 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3528 as_state->build_scratch_memory_requirements_checked = true;
3529 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3530 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3531 as_state->update_scratch_memory_requirements_checked = true;
3532 }
3533 }
3534}
3535
Jeff Bolz95176d02020-04-01 00:36:16 -05003536void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryCommon(
3537 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result,
3538 bool isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003539 if (VK_SUCCESS != result) return;
3540 for (uint32_t i = 0; i < bindInfoCount; i++) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003541 const VkBindAccelerationStructureMemoryInfoKHR &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003542
3543 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
3544 if (as_state) {
3545 // Track bound memory range information
3546 auto mem_info = GetDevMemState(info.memory);
3547 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07003548 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06003549 }
3550 // Track objects tied to memory
3551 SetMemBinding(info.memory, as_state, info.memoryOffset,
Jeff Bolz95176d02020-04-01 00:36:16 -05003552 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR));
locke-lunargd556cc32019-09-17 01:21:23 -06003553
3554 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003555 // XXX TODO: Query device address for KHR extension
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003556 if (enabled[gpu_validation] && isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003557 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3558 }
3559 }
3560 }
3561}
3562
Jeff Bolz95176d02020-04-01 00:36:16 -05003563void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3564 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
3565 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, true);
3566}
3567
3568void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryKHR(
3569 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result) {
3570 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, false);
3571}
3572
locke-lunargd556cc32019-09-17 01:21:23 -06003573void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3574 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3575 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3576 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3577 if (cb_state == nullptr) {
3578 return;
3579 }
3580
3581 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3582 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3583 if (dst_as_state != nullptr) {
3584 dst_as_state->built = true;
3585 dst_as_state->build_info.initialize(pInfo);
3586 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3587 }
3588 if (src_as_state != nullptr) {
3589 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3590 }
3591 cb_state->hasBuildAccelerationStructureCmd = true;
3592}
3593
3594void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3595 VkAccelerationStructureNV dst,
3596 VkAccelerationStructureNV src,
3597 VkCopyAccelerationStructureModeNV mode) {
3598 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3599 if (cb_state) {
3600 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3601 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3602 if (dst_as_state != nullptr && src_as_state != nullptr) {
3603 dst_as_state->built = true;
3604 dst_as_state->build_info = src_as_state->build_info;
3605 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3606 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3607 }
3608 }
3609}
3610
Jeff Bolz95176d02020-04-01 00:36:16 -05003611void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3612 VkAccelerationStructureKHR accelerationStructure,
3613 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003614 if (!accelerationStructure) return;
3615 auto *as_state = GetAccelerationStructureState(accelerationStructure);
3616 if (as_state) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003617 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06003618 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
3619 for (auto mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07003620 RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -06003621 }
3622 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003623 as_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003624 accelerationStructureMap.erase(accelerationStructure);
3625 }
3626}
3627
Jeff Bolz95176d02020-04-01 00:36:16 -05003628void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3629 VkAccelerationStructureNV accelerationStructure,
3630 const VkAllocationCallbacks *pAllocator) {
3631 PreCallRecordDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator);
3632}
3633
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003634void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3635 uint32_t viewportCount,
3636 const VkViewportWScalingNV *pViewportWScalings) {
3637 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3638 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003639 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003640}
3641
locke-lunargd556cc32019-09-17 01:21:23 -06003642void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3643 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3644 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003645 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003646}
3647
3648void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3649 uint16_t lineStipplePattern) {
3650 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3651 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003652 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003653}
3654
3655void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3656 float depthBiasClamp, float depthBiasSlopeFactor) {
3657 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3658 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003659 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003660}
3661
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003662void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3663 const VkRect2D *pScissors) {
3664 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3665 cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
3666 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003667 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003668}
3669
locke-lunargd556cc32019-09-17 01:21:23 -06003670void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3671 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3672 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003673 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003674}
3675
3676void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3677 float maxDepthBounds) {
3678 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3679 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003680 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003681}
3682
3683void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3684 uint32_t compareMask) {
3685 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3686 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003687 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003688}
3689
3690void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3691 uint32_t writeMask) {
3692 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3693 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003694 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003695}
3696
3697void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3698 uint32_t reference) {
3699 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3700 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003701 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003702}
3703
3704// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
3705// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
3706// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
3707void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
3708 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
3709 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
3710 cvdescriptorset::DescriptorSet *push_descriptor_set,
3711 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
3712 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
3713 // Defensive
3714 assert(pipeline_layout);
3715 if (!pipeline_layout) return;
3716
3717 uint32_t required_size = first_set + set_count;
3718 const uint32_t last_binding_index = required_size - 1;
3719 assert(last_binding_index < pipeline_layout->compat_for_set.size());
3720
3721 // Some useful shorthand
3722 auto &last_bound = cb_state->lastBound[pipeline_bind_point];
3723 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
3724 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
3725
3726 // We need this three times in this function, but nowhere else
3727 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
3728 if (ds && ds->IsPushDescriptor()) {
3729 assert(ds == last_bound.push_descriptor_set.get());
3730 last_bound.push_descriptor_set = nullptr;
3731 return true;
3732 }
3733 return false;
3734 };
3735
3736 // Clean up the "disturbed" before and after the range to be set
3737 if (required_size < current_size) {
3738 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
3739            // We're disturbing entries after the last one; we'll shrink below, but first check for and clean up the push_descriptor
3740 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
3741 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
3742 }
3743 } else {
3744 // We're not disturbing past last, so leave the upper binding data alone.
3745 required_size = current_size;
3746 }
3747 }
3748
3749 // We resize if we need more set entries or if those past "last" are disturbed
3750 if (required_size != current_size) {
3751 last_bound.per_set.resize(required_size);
3752 }
3753
3754    // For any previously bound sets, mark them "invalid" if they were disturbed by this update
3755 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
3756 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
3757 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3758 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
3759 last_bound.per_set[set_idx].dynamicOffsets.clear();
3760 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
3761 }
3762 }
3763
3764 // Now update the bound sets with the input sets
3765 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
3766 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
3767 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
3768 cvdescriptorset::DescriptorSet *descriptor_set =
3769 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
3770
3771 // Record binding (or push)
3772 if (descriptor_set != last_bound.push_descriptor_set.get()) {
3773 // Only cleanup the push descriptors if they aren't the currently used set.
3774 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3775 }
3776 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
3777 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
3778
3779 if (descriptor_set) {
3780 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
3781 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
3782 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
3783 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
3784 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
3785 input_dynamic_offsets = end_offset;
3786 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
3787 } else {
3788 last_bound.per_set[set_idx].dynamicOffsets.clear();
3789 }
3790 if (!descriptor_set->IsPushDescriptor()) {
3791 // Can't cache validation of push_descriptors
3792 cb_state->validated_descriptor_sets.insert(descriptor_set);
3793 }
3794 }
3795 }
3796}
3797
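// Example (illustrative sketch only, per the "Pipeline Layout Compatibility" rules):
// suppose sets 0..3 are bound and the app rebinds set 1 with a layout whose compat id
// differs at index 3. Roughly, the routine above then:
//   - rebinds per_set[1] and stamps it with the new compat id,
//   - invalidates per_set[0] only if its compat id changed under the new layout,
//   - shrinks away per_set[2..3], since the tail past the last written set was disturbed.
//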
3798// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3799void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3800 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3801 uint32_t firstSet, uint32_t setCount,
3802 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3803 const uint32_t *pDynamicOffsets) {
3804 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3805 auto pipeline_layout = GetPipelineLayout(layout);
3806
3807 // Resize binding arrays
3808 uint32_t last_set_index = firstSet + setCount - 1;
3809 if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
3810 cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
3811 }
3812
3813 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3814 dynamicOffsetCount, pDynamicOffsets);
3815 cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
3816 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3817}
3818
3819void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
3820 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
3821 const VkWriteDescriptorSet *pDescriptorWrites) {
3822 const auto &pipeline_layout = GetPipelineLayout(layout);
3823 // Short circuit invalid updates
3824 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
3825 !pipeline_layout->set_layouts[set]->IsPushDescriptor())
3826 return;
3827
3828 // We need a descriptor set to update the bindings with, compatible with the passed layout
3829 const auto dsl = pipeline_layout->set_layouts[set];
3830 auto &last_bound = cb_state->lastBound[pipelineBindPoint];
3831 auto &push_descriptor_set = last_bound.push_descriptor_set;
3832    // If we are disturbing the current push_descriptor_set, clear it
3833 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
John Zulaufd2c3dae2019-12-12 11:02:17 -07003834 last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
locke-lunargd556cc32019-09-17 01:21:23 -06003835 }
3836
3837 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
3838 nullptr);
3839 last_bound.pipeline_layout = layout;
3840
3841 // Now that we have either the new or extant push_descriptor set ... do the write updates against it
Jeff Bolz41a1ced2019-10-11 11:40:49 -05003842 push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
locke-lunargd556cc32019-09-17 01:21:23 -06003843}
3844
3845void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3846 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3847 uint32_t set, uint32_t descriptorWriteCount,
3848 const VkWriteDescriptorSet *pDescriptorWrites) {
3849 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3850 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3851}
3852
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003853void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3854 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3855 const void *pValues) {
3856 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3857 if (cb_state != nullptr) {
3858 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3859
3860 auto &push_constant_data = cb_state->push_constant_data;
3861 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3862 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
3863 }
3864}
3865
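// Example (illustrative sketch only): push constant values are shadowed byte-for-byte
// so later validation can inspect exactly what the pipeline will see. For a
// hypothetical 16-byte update at offset 8:
//
//     vkCmdPushConstants(cb, layout, VK_SHADER_STAGE_VERTEX_BIT, /*offset*/ 8, /*size*/ 16, data);
//     // => std::memcpy(cb_state->push_constant_data.data() + 8, data, 16) above, after the
//     //    shadow buffer has been reset if `layout` was incompatible with the previous one.
//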
locke-lunargd556cc32019-09-17 01:21:23 -06003866void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3867 VkIndexType indexType) {
3868 auto buffer_state = GetBufferState(buffer);
3869 auto cb_state = GetCBState(commandBuffer);
3870
3871 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
Piers Daniell39842ee2020-07-10 16:42:33 -06003872 cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
locke-lunargd556cc32019-09-17 01:21:23 -06003873 cb_state->index_buffer_binding.buffer = buffer;
3874 cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
3875 cb_state->index_buffer_binding.offset = offset;
3876 cb_state->index_buffer_binding.index_type = indexType;
3877    // Add a binding for this index buffer to this command buffer
3878 AddCommandBufferBindingBuffer(cb_state, buffer_state);
3879}
3880
3881void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3882 uint32_t bindingCount, const VkBuffer *pBuffers,
3883 const VkDeviceSize *pOffsets) {
3884 auto cb_state = GetCBState(commandBuffer);
3885
3886 uint32_t end = firstBinding + bindingCount;
3887 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3888 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3889 }
3890
3891 for (uint32_t i = 0; i < bindingCount; ++i) {
3892 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
3893 vertex_buffer_binding.buffer = pBuffers[i];
3894 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06003895 vertex_buffer_binding.size = VK_WHOLE_SIZE;
3896 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06003897        // Add a binding for this vertex buffer to this command buffer
Jeff Bolz165818a2020-05-08 11:19:03 -05003898 if (pBuffers[i]) {
3899 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
3900 }
locke-lunargd556cc32019-09-17 01:21:23 -06003901 }
3902}
3903
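// Example (illustrative sketch only): this entry point carries no stride, so the
// tracker records stride 0 and size VK_WHOLE_SIZE; a real per-binding stride is only
// known via vkCmdBindVertexBuffers2EXT from VK_EXT_extended_dynamic_state. For
// hypothetical buffers vb_a and vb_b:
//
//     VkBuffer bufs[2] = {vb_a, vb_b};
//     VkDeviceSize offs[2] = {0, 256};
//     vkCmdBindVertexBuffers(cb, /*firstBinding*/ 2, /*bindingCount*/ 2, bufs, offs);
//     // => vertex_buffer_bindings[2] = {vb_a, 0, VK_WHOLE_SIZE, 0}
//     //    vertex_buffer_bindings[3] = {vb_b, 256, VK_WHOLE_SIZE, 0}
//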
3904void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3905 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
3906 auto cb_state = GetCBState(commandBuffer);
3907 auto dst_buffer_state = GetBufferState(dstBuffer);
3908
3909 // Update bindings between buffer and cmd buffer
3910 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
3911}
3912
Jeff Bolz310775c2019-10-09 00:46:33 -05003913bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
3914 EventToStageMap *localEventToStageMap) {
3915 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06003916 return false;
3917}
3918
3919void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3920 VkPipelineStageFlags stageMask) {
3921 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3922 auto event_state = GetEventState(event);
3923 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003924 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003925 }
3926 cb_state->events.push_back(event);
3927 if (!cb_state->waitedEvents.count(event)) {
3928 cb_state->writeEventsBeforeWait.push_back(event);
3929 }
Jeff Bolz310775c2019-10-09 00:46:33 -05003930 cb_state->eventUpdates.emplace_back(
3931 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
3932 return SetEventStageMask(event, stageMask, localEventToStageMap);
3933 });
locke-lunargd556cc32019-09-17 01:21:23 -06003934}
3935
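// Example (illustrative sketch only): event state is not applied at record time; the
// lambda queued on eventUpdates above is replayed at queue-submit time against a
// per-submission map, so ordering across command buffers resolves correctly:
//
//     EventToStageMap local_map;  // hypothetical submit-local state
//     for (auto &fn : cb_state->eventUpdates) {
//         fn(device_data, /*do_validate*/ true, &local_map);
//     }
//     // local_map[event] now holds the last stageMask written in submission order
//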
3936void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3937 VkPipelineStageFlags stageMask) {
3938 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3939 auto event_state = GetEventState(event);
3940 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003941 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003942 }
3943 cb_state->events.push_back(event);
3944 if (!cb_state->waitedEvents.count(event)) {
3945 cb_state->writeEventsBeforeWait.push_back(event);
3946 }
3947
3948 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003949 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
3950 return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
3951 });
locke-lunargd556cc32019-09-17 01:21:23 -06003952}
3953
3954void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
3955 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
3956 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3957 uint32_t bufferMemoryBarrierCount,
3958 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3959 uint32_t imageMemoryBarrierCount,
3960 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3961 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3962 for (uint32_t i = 0; i < eventCount; ++i) {
3963 auto event_state = GetEventState(pEvents[i]);
3964 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003965 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
3966 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003967 }
3968 cb_state->waitedEvents.insert(pEvents[i]);
3969 cb_state->events.push_back(pEvents[i]);
3970 }
3971}
3972
Jeff Bolz310775c2019-10-09 00:46:33 -05003973bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
3974 (*localQueryToStateMap)[object] = value;
3975 return false;
3976}
3977
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003978bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
3979 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05003980 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003981 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05003982 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06003983 }
3984 return false;
3985}
3986
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003987QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
3988 uint32_t perfPass) const {
3989 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06003990
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003991 auto iter = localQueryToStateMap->find(query);
3992 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05003993
Jeff Bolz310775c2019-10-09 00:46:33 -05003994 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06003995}
3996
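// Example (illustrative sketch only): query state is keyed by (pool, index, perfPass),
// so a performance query replayed once per counter pass is tracked independently per
// pass. For a hypothetical pool and slot:
//
//     QueryMap local_map;
//     SetQueryState(QueryObject(QueryObject(pool, slot), /*perfPass*/ 0), QUERYSTATE_RUNNING, &local_map);
//     // GetQueryState(&local_map, pool, slot, 0) -> QUERYSTATE_RUNNING
//     // GetQueryState(&local_map, pool, slot, 1) -> QUERYSTATE_UNKNOWN (pass 1 never written)
//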
3997void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003998 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003999 cb_state->activeQueries.insert(query_obj);
4000 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004001 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
4002 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4003 QueryMap *localQueryToStateMap) {
4004 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
4005 return false;
4006 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004007 auto pool_state = GetQueryPoolState(query_obj.pool);
4008 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
4009 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004010}
4011
void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
                                                         VkFlags flags) {
    if (disabled[query_validation]) return;
    QueryObject query = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query);
}

void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled[query_validation]) return;
    cb_state->activeQueries.erase(query_obj);
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
    if (disabled[query_validation]) return;
    QueryObject query_obj = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled[query_validation]) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
        QueryObject query = {queryPool, slot};
        cb_state->resetQueries.insert(query);
    }

    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, VkQueryPool &firstPerfQueryPool,
                                                                            uint32_t perfQueryPass,
                                                                            QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
                                                                   VkDeviceSize dstOffset, VkDeviceSize stride,
                                                                   VkQueryResultFlags flags) {
    if (disabled[query_validation]) return;
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buff_state = GetBufferState(dstBuffer);
    AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    if (disabled[query_validation]) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
                                                VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
}

void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            VkImageView view = pCreateInfo->pAttachments[i];
            auto view_state = GetImageViewState(view);
            if (!view_state) {
                continue;
            }
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}

void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);
    auto &subpass_dependencies = render_pass->subpass_dependencies;
    subpass_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
        subpass_dependencies[i].pass = i;
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
        const auto srcSubpass = dependency.srcSubpass;
        const auto dstSubpass = dependency.dstSubpass;
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
        if (srcSubpass == VK_SUBPASS_EXTERNAL) {
            assert(dstSubpass != VK_SUBPASS_EXTERNAL);  // this is invalid per VUID-VkSubpassDependency-srcSubpass-00865
            subpass_dependencies[dstSubpass].barrier_from_external = &dependency;
        } else if (dstSubpass == VK_SUBPASS_EXTERNAL) {
            subpass_dependencies[srcSubpass].barrier_to_external = &dependency;
        } else if (dependency.srcSubpass != dependency.dstSubpass) {
            // ignore self dependencies in prev and next
            subpass_dependencies[srcSubpass].next.emplace_back(&dependency, &subpass_dependencies[dstSubpass]);
            subpass_dependencies[dstSubpass].prev.emplace_back(&dependency, &subpass_dependencies[srcSubpass]);
        }
    }

    //
    // Determine "asynchronous" subpasses
    // Synchronization is only interested in asynchronous stages *earlier* than the current one, so we only look towards those.
    // NOTE: This is O(N^3), which we could shrink to O(N^2 logN) using sets instead of arrays, but given that N is likely to be
    // small and the K for |= from the prev is much less than for set, we'll accept the brute force.
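    // Worked example: with dependencies 0->1 and 1->2, pass_depends[2] picks up bits 0 and 1 via the |= from its prev
    // entries (the transitive closure), so neither earlier subpass is recorded as async; a subpass with no dependency
    // path from some earlier subpass records that subpass in its async list instead.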
    std::vector<std::vector<bool>> pass_depends(pCreateInfo->subpassCount);
    for (uint32_t i = 1; i < pCreateInfo->subpassCount; ++i) {
        auto &depends = pass_depends[i];
        depends.resize(i);
        auto &subpass_dep = subpass_dependencies[i];
        for (const auto &prev : subpass_dep.prev) {
            const auto prev_pass = prev.node->pass;
            const auto &prev_depends = pass_depends[prev_pass];
            for (uint32_t j = 0; j < prev_pass; j++) {
                depends[j] = depends[j] | prev_depends[j];
            }
            depends[prev_pass] = true;
        }
        for (uint32_t pass = 0; pass < subpass_dep.pass; pass++) {
            if (!depends[pass]) {
                subpass_dep.async.push_back(pass);
            }
        }
    }
}

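// The Vulkan spec defines implicit subpass dependencies that apply when an application supplies no explicit
// dependency to or from VK_SUBPASS_EXTERNAL for a subpass; the two helpers below build those spec-defined barriers
// so downstream consumers can treat implicit and explicit dependencies uniformly.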
static VkSubpassDependency2 ImplicitDependencyFromExternal(uint32_t subpass) {
    VkSubpassDependency2 from_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
                                          nullptr,
                                          VK_SUBPASS_EXTERNAL,
                                          subpass,
                                          VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                                          VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                          0,
                                          VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                              VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
                                              VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
                                          0,
                                          0};
    return from_external;
}

static VkSubpassDependency2 ImplicitDependencyToExternal(uint32_t subpass) {
    VkSubpassDependency2 to_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
                                        nullptr,
                                        subpass,
                                        VK_SUBPASS_EXTERNAL,
                                        VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                        VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
                                        VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                            VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
                                            VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
                                        0,
                                        0,
                                        0};
    return to_external;
}

void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
                                                         VkRenderPass *pRenderPass) {
    render_pass->renderPass = *pRenderPass;
    auto create_info = render_pass->createInfo.ptr();

    RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());

    struct AttachmentTracker {  // This is really only of local interest, but a bit big for a lambda
        RENDER_PASS_STATE *const rp;
        std::vector<uint32_t> &first;
        std::vector<bool> &first_is_transition;
        std::vector<uint32_t> &last;
        std::vector<std::vector<RENDER_PASS_STATE::AttachmentTransition>> &subpass_transitions;
        std::unordered_map<uint32_t, bool> &first_read;
        const uint32_t attachment_count;
        std::vector<VkImageLayout> attachment_layout;
        std::vector<std::vector<VkImageLayout>> subpass_attachment_layout;
        AttachmentTracker(std::shared_ptr<RENDER_PASS_STATE> &render_pass)
            : rp(render_pass.get()),
              first(rp->attachment_first_subpass),
              first_is_transition(rp->attachment_first_is_transition),
              last(rp->attachment_last_subpass),
              subpass_transitions(rp->subpass_transitions),
              first_read(rp->attachment_first_read),
              attachment_count(rp->createInfo.attachmentCount),
              attachment_layout(),
              subpass_attachment_layout() {
            first.resize(attachment_count, VK_SUBPASS_EXTERNAL);
            first_is_transition.resize(attachment_count, false);
            last.resize(attachment_count, VK_SUBPASS_EXTERNAL);
            subpass_transitions.resize(rp->createInfo.subpassCount + 1);  // Add an extra for EndRenderPass
            attachment_layout.reserve(attachment_count);
            subpass_attachment_layout.resize(rp->createInfo.subpassCount);
            for (auto &subpass_layouts : subpass_attachment_layout) {
                subpass_layouts.resize(attachment_count, kInvalidLayout);
            }

            for (uint32_t j = 0; j < attachment_count; j++) {
                attachment_layout.push_back(rp->createInfo.pAttachments[j].initialLayout);
            }
        }

        void Update(uint32_t subpass, const VkAttachmentReference2 *attach_ref, uint32_t count, bool is_read) {
            if (nullptr == attach_ref) return;
            for (uint32_t j = 0; j < count; ++j) {
                const auto attachment = attach_ref[j].attachment;
                if (attachment != VK_ATTACHMENT_UNUSED) {
                    const auto layout = attach_ref[j].layout;
                    // Take advantage of the fact that insert won't overwrite, so we'll only write the first time.
                    first_read.insert(std::make_pair(attachment, is_read));
                    if (first[attachment] == VK_SUBPASS_EXTERNAL) {
                        first[attachment] = subpass;
                        const auto initial_layout = rp->createInfo.pAttachments[attachment].initialLayout;
                        if (initial_layout != layout) {
                            subpass_transitions[subpass].emplace_back(VK_SUBPASS_EXTERNAL, attachment, initial_layout, layout);
                            first_is_transition[attachment] = true;
                        }
                    }
                    last[attachment] = subpass;

                    for (const auto &prev : rp->subpass_dependencies[subpass].prev) {
                        const auto prev_pass = prev.node->pass;
                        const auto prev_layout = subpass_attachment_layout[prev_pass][attachment];
                        if ((prev_layout != kInvalidLayout) && (prev_layout != layout)) {
                            subpass_transitions[subpass].emplace_back(prev_pass, attachment, prev_layout, layout);
                        }
                    }
                    attachment_layout[attachment] = layout;
                    // Record the layout this subpass leaves the attachment in; the prev-pass lookup above reads it
                    // when later subpasses use the attachment. Without this write the per-subpass layouts would stay
                    // kInvalidLayout and no between-subpass transitions would ever be recorded.
                    subpass_attachment_layout[subpass][attachment] = layout;
                }
            }
        }
        void FinalTransitions() {
            auto &final_transitions = subpass_transitions[rp->createInfo.subpassCount];

            for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
                const auto final_layout = rp->createInfo.pAttachments[attachment].finalLayout;
                // Add final transitions for attachments that were used and change layout.
                if ((last[attachment] != VK_SUBPASS_EXTERNAL) && final_layout != attachment_layout[attachment]) {
                    final_transitions.emplace_back(last[attachment], attachment, attachment_layout[attachment], final_layout);
                }
            }
        }
    };
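    // Walk every attachment reference subpass-by-subpass, recording for each attachment its first and last using
    // subpass, whether the first use performs a layout transition, and the per-subpass layout transitions (including
    // the implicit initialLayout transition on first use and the finalLayout transitions added by FinalTransitions).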
    AttachmentTracker attachment_tracker(render_pass);

    for (uint32_t subpass_index = 0; subpass_index < create_info->subpassCount; ++subpass_index) {
        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[subpass_index];
        attachment_tracker.Update(subpass_index, subpass.pColorAttachments, subpass.colorAttachmentCount, false);
        attachment_tracker.Update(subpass_index, subpass.pResolveAttachments, subpass.colorAttachmentCount, false);
        attachment_tracker.Update(subpass_index, subpass.pDepthStencilAttachment, 1, false);
        attachment_tracker.Update(subpass_index, subpass.pInputAttachments, subpass.inputAttachmentCount, true);
    }
    attachment_tracker.FinalTransitions();

    // Add implicit dependencies
    for (uint32_t attachment = 0; attachment < attachment_tracker.attachment_count; attachment++) {
        const auto first_use = attachment_tracker.first[attachment];
        if (first_use != VK_SUBPASS_EXTERNAL) {
            auto &subpass_dep = render_pass->subpass_dependencies[first_use];
            if (!subpass_dep.barrier_from_external) {
                // Add implicit from barrier
                subpass_dep.implicit_barrier_from_external.reset(
                    new VkSubpassDependency2(ImplicitDependencyFromExternal(first_use)));
                subpass_dep.barrier_from_external = subpass_dep.implicit_barrier_from_external.get();
            }
        }

        const auto last_use = attachment_tracker.last[attachment];
        if (last_use != VK_SUBPASS_EXTERNAL) {
            auto &subpass_dep = render_pass->subpass_dependencies[last_use];
            if (!render_pass->subpass_dependencies[last_use].barrier_to_external) {
                // Add implicit to barrier
                subpass_dep.implicit_barrier_to_external.reset(new VkSubpassDependency2(ImplicitDependencyToExternal(last_use)));
                subpass_dep.barrier_to_external = subpass_dep.implicit_barrier_to_external.get();
            }
        }
    }

    // Even though render_pass is passed by reference, it must still be moved so that move assignment is invoked.
    renderPassMap[*pRenderPass] = std::move(render_pass);
}

// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in the Google style guide, but intentionally forces the caller
// to move or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr
// copy construction or assignment.
void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
}

void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                     const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                     VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                             VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        if (framebuffer) cb_state->framebuffers.insert(framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer.get());
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(
            render_pass_state->cb_bindings,
            VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state.get()), cb_state);

        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }

        cb_state->imagelessFramebufferAttachments.clear();
        auto attachment_info_struct = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
        if (attachment_info_struct) {
            for (uint32_t i = 0; i < attachment_info_struct->attachmentCount; i++) {
                IMAGE_VIEW_STATE *img_view_state = GetImageViewState(attachment_info_struct->pAttachments[i]);
                cb_state->imagelessFramebufferAttachments.push_back(img_view_state);
            }
        }
    }
}
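
// The begin/next/end recorders below keep the command buffer's render pass cursor consistent:
// RecordCmdBeginRenderPassState (above) resets activeSubpass to 0, RecordCmdNextSubpass increments it, and
// RecordCmdEndRenderPassState clears the active render pass, subpass index, and framebuffer bindings.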

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
                                                                        uint32_t counterBufferCount,
                                                                        const VkBuffer *pCounterBuffers,
                                                                        const VkDeviceSize *pCounterBufferOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    cb_state->transform_feedback_active = true;
}

void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
                                                                      uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
                                                                      const VkDeviceSize *pCounterBufferOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    cb_state->transform_feedback_active = false;
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeSubpass++;
    cb_state->activeSubpassContents = contents;
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
    cb_state->imagelessFramebufferAttachments.clear();
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                             const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
                // from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}

void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}

void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range = MemRange();
        mem_info->p_driver_data = nullptr;
    }
}

void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // A special Android image cannot get its VkSubresourceLayout until the image is bound to memory.
        // See: VUID-vkGetImageSubresourceLayout-image-01895
        image_state->fragment_encoder =
            std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
            AddAliasingImage(image_state);
        }
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
                                                           VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkBindImageMemoryInfo bindInfo = {};
    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindInfo.image = image;
    bindInfo.memory = mem;
    bindInfo.memoryOffset = memoryOffset;
    UpdateBindImageMemoryState(bindInfo);
}

void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
    auto event_state = GetEventState(event);
    if (event_state) {
        event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
    }
}

void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
        semaphore_state->scope = kSyncScopeExternalPermanent;
    }
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif

void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}

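// Per the external fence semantics in the Vulkan spec, Sync FD handle types only support copy transference, so an
// import via a Sync FD (or any import flagged TEMPORARY) only temporarily overrides the fence payload and internal
// tracking can resume afterwards; reference-transference imports permanently move the fence to external scope.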
void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
                                                    VkFenceImportFlagsKHR flags) {
    FENCE_STATE *fence_node = GetFenceState(fence);
    if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
            fence_node->scope == kSyncScopeInternal) {
            fence_node->scope = kSyncScopeExternalTemporary;
        } else {
            fence_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
    FENCE_STATE *fence_state = GetFenceState(fence);
    if (fence_state) {
        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
            // Export with reference transference becomes external
            fence_state->scope = kSyncScopeExternalPermanent;
        } else if (fence_state->scope == kSyncScopeInternal) {
            // Export with copy transference has a side effect of resetting the fence
            fence_state->state = FENCE_UNSIGNALED;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}

void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
    if (VK_SUCCESS != result) return;
    eventMap[*pEvent].write_in_use = 0;
    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}

void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
    return;
}

void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}

void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}

void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
        // confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP
    // (and its semaphore waits) /never/ participate in any completion proof.
}

void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
    if (pCreateInfos) {
        for (uint32_t i = 0; i < swapchainCount; i++) {
            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
        }
    }
}

void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}

void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
    if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
            phys_device_state.phys_device = pPhysicalDevices[i];
            // Init actual features for each physical device
            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
        }
    }
}

// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}
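
// The physical-device query recorders below advance a per-query call-state machine
// (UNCALLED -> QUERY_COUNT -> QUERY_DETAILS) so that validation built on this tracker can warn when an application
// consumes counts or details it never actually queried.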
4864
4865void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
4866 uint32_t *pQueueFamilyPropertyCount,
4867 VkQueueFamilyProperties *pQueueFamilyProperties) {
4868 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4869 assert(physical_device_state);
4870 VkQueueFamilyProperties2KHR *pqfp = nullptr;
4871 std::vector<VkQueueFamilyProperties2KHR> qfp;
4872 qfp.resize(*pQueueFamilyPropertyCount);
4873 if (pQueueFamilyProperties) {
4874 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
4875 qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
4876 qfp[i].pNext = nullptr;
4877 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
4878 }
4879 pqfp = qfp.data();
4880 }
4881 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
4882}
4883
4884void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
4885 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
4886 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4887 assert(physical_device_state);
4888 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4889 pQueueFamilyProperties);
4890}
4891
4892void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
4893 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
4894 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
4895 assert(physical_device_state);
4896 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
4897 pQueueFamilyProperties);
4898}
4899void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
4900 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004901 if (!surface) return;
4902 auto surface_state = GetSurfaceState(surface);
4903 surface_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004904 surface_map.erase(surface);
4905}
4906
4907void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004908 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06004909}
4910
4911void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
4912 const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
4913 const VkAllocationCallbacks *pAllocator,
4914 VkSurfaceKHR *pSurface, VkResult result) {
4915 if (VK_SUCCESS != result) return;
4916 RecordVulkanSurface(pSurface);
4917}
4918
4919#ifdef VK_USE_PLATFORM_ANDROID_KHR
4920void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
4921 const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
4922 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4923 VkResult result) {
4924 if (VK_SUCCESS != result) return;
4925 RecordVulkanSurface(pSurface);
4926}
4927#endif // VK_USE_PLATFORM_ANDROID_KHR
4928
4929#ifdef VK_USE_PLATFORM_IOS_MVK
4930void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
4931 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4932 VkResult result) {
4933 if (VK_SUCCESS != result) return;
4934 RecordVulkanSurface(pSurface);
4935}
4936#endif // VK_USE_PLATFORM_IOS_MVK
4937
4938#ifdef VK_USE_PLATFORM_MACOS_MVK
4939void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
4940 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
4941 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4942 VkResult result) {
4943 if (VK_SUCCESS != result) return;
4944 RecordVulkanSurface(pSurface);
4945}
4946#endif // VK_USE_PLATFORM_MACOS_MVK
4947
Jeremy Kniagerf33a67c2019-12-09 09:44:39 -07004948#ifdef VK_USE_PLATFORM_METAL_EXT
4949void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
4950 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
4951 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4952 VkResult result) {
4953 if (VK_SUCCESS != result) return;
4954 RecordVulkanSurface(pSurface);
4955}
4956#endif // VK_USE_PLATFORM_METAL_EXT
4957
locke-lunargd556cc32019-09-17 01:21:23 -06004958#ifdef VK_USE_PLATFORM_WAYLAND_KHR
4959void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
4960 const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
4961 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4962 VkResult result) {
4963 if (VK_SUCCESS != result) return;
4964 RecordVulkanSurface(pSurface);
4965}
4966#endif // VK_USE_PLATFORM_WAYLAND_KHR
4967
4968#ifdef VK_USE_PLATFORM_WIN32_KHR
4969void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
4970 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
4971 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4972 VkResult result) {
4973 if (VK_SUCCESS != result) return;
4974 RecordVulkanSurface(pSurface);
4975}
4976#endif // VK_USE_PLATFORM_WIN32_KHR
4977
4978#ifdef VK_USE_PLATFORM_XCB_KHR
4979void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
4980 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4981 VkResult result) {
4982 if (VK_SUCCESS != result) return;
4983 RecordVulkanSurface(pSurface);
4984}
4985#endif // VK_USE_PLATFORM_XCB_KHR
4986
4987#ifdef VK_USE_PLATFORM_XLIB_KHR
4988void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
4989 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4990 VkResult result) {
4991 if (VK_SUCCESS != result) return;
4992 RecordVulkanSurface(pSurface);
4993}
4994#endif // VK_USE_PLATFORM_XLIB_KHR
4995
Niklas Haas8b84af12020-04-19 22:20:11 +02004996void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
4997 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
4998 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
4999 VkResult result) {
5000 if (VK_SUCCESS != result) return;
5001 RecordVulkanSurface(pSurface);
5002}
5003
Cort23cf2282019-09-20 18:58:18 +02005004void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005005 VkPhysicalDeviceFeatures *pFeatures) {
5006 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5007 physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
Yilong Li358152a2020-07-08 02:16:45 -07005008 // Reset the features2 safe struct before setting up the features field.
5009 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02005010 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02005011}
5012
5013void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005014 VkPhysicalDeviceFeatures2 *pFeatures) {
5015 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5016 physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
5017 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005018}
5019
5020void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005021 VkPhysicalDeviceFeatures2 *pFeatures) {
5022 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5023 physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
5024 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005025}
5026
locke-lunargd556cc32019-09-17 01:21:23 -06005027void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
5028 VkSurfaceKHR surface,
5029 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
5030 VkResult result) {
5031 if (VK_SUCCESS != result) return;
5032 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5033 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
Camden Stocker61050592019-11-27 12:03:09 -08005034 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005035 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
5036}
5037
5038void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
5039 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5040 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
5041 if (VK_SUCCESS != result) return;
5042 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5043 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
Camden Stocker61050592019-11-27 12:03:09 -08005044 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005045 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
5046}
5047
5048void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
5049 VkSurfaceKHR surface,
5050 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
5051 VkResult result) {
5052 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5053 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
Camden Stocker61050592019-11-27 12:03:09 -08005054 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005055 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
5056 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
5057 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
5058 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
5059 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
5060 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
5061 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
5062 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
5063 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
5064 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
5065}
5066
5067void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
5068 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
5069 VkBool32 *pSupported, VkResult result) {
5070 if (VK_SUCCESS != result) return;
5071 auto surface_state = GetSurfaceState(surface);
5072 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
5073}
5074
5075void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
5076 VkSurfaceKHR surface,
5077 uint32_t *pPresentModeCount,
5078 VkPresentModeKHR *pPresentModes,
5079 VkResult result) {
5080 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5081
5082 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
5083 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5084 auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;
5085
5086 if (*pPresentModeCount) {
5087 if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
5088 if (*pPresentModeCount > physical_device_state->present_modes.size())
5089 physical_device_state->present_modes.resize(*pPresentModeCount);
5090 }
5091 if (pPresentModes) {
5092 if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
5093 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
5094 physical_device_state->present_modes[i] = pPresentModes[i];
5095 }
5096 }
5097}
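
// The count/details promotion above mirrors Vulkan's two-call enumeration idiom, which is exactly what
// the QUERY_COUNT and QUERY_DETAILS states model. A minimal sketch of that idiom from the application
// side (error handling elided; `gpu` and `surface` are placeholders):
//
//     uint32_t count = 0;
//     vkGetPhysicalDeviceSurfacePresentModesKHR(gpu, surface, &count, nullptr);       // -> QUERY_COUNT
//     std::vector<VkPresentModeKHR> modes(count);
//     vkGetPhysicalDeviceSurfacePresentModesKHR(gpu, surface, &count, modes.data());  // -> QUERY_DETAILS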
5098
5099void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
5100 uint32_t *pSurfaceFormatCount,
5101 VkSurfaceFormatKHR *pSurfaceFormats,
5102 VkResult result) {
5103 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5104
5105 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5106 auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;
5107
5108 if (*pSurfaceFormatCount) {
5109 if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
5110 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
5111 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
5112 }
5113 if (pSurfaceFormats) {
5114 if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
5115 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
5116 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
5117 }
5118 }
5119}
5120
5121void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
5122 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5123 uint32_t *pSurfaceFormatCount,
5124 VkSurfaceFormat2KHR *pSurfaceFormats,
5125 VkResult result) {
5126 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5127
5128    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5129    if (*pSurfaceFormatCount) {
5130        if (physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
5131            physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
5132        }
5133        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
5134            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
5135    }
5136    if (pSurfaceFormats) {
5137        if (physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
5138            physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
5139        }
5140        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
5141            physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
5142 }
5143 }
5144}
5145
5146void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
5147 const VkDebugUtilsLabelEXT *pLabelInfo) {
5148 BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
5149}
5150
5151void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
5152 EndCmdDebugUtilsLabel(report_data, commandBuffer);
5153}
5154
5155void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
5156 const VkDebugUtilsLabelEXT *pLabelInfo) {
5157 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
5158
5159 // Squirrel away an easily accessible copy.
5160 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5161 cb_state->debug_label = LoggingLabel(pLabelInfo);
5162}
5163
5164void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
5165 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
5166 if (NULL != pPhysicalDeviceGroupProperties) {
5167 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
5168 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
5169 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
5170 auto &phys_device_state = physical_device_map[cur_phys_dev];
5171 phys_device_state.phys_device = cur_phys_dev;
5172 // Init actual features for each physical device
5173 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
5174 }
5175 }
5176 }
5177}
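
// Every physical device named in any group gets (or reuses) a PHYSICAL_DEVICE_STATE entry keyed by its
// handle, with its features queried eagerly so feature-dependent checks work even for devices discovered
// only through group enumeration rather than vkEnumeratePhysicalDevices.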
5178
5179void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
5180 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
5181 VkResult result) {
5182 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5183 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5184}
5185
5186void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
5187 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
5188 VkResult result) {
5189 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5190 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5191}
5192
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005193void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
5194 uint32_t queueFamilyIndex,
5195 uint32_t *pCounterCount,
5196 VkPerformanceCounterKHR *pCounters) {
5197 if (NULL == pCounters) return;
5198
5199 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5200 assert(physical_device_state);
5201
5202 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
5203 queueFamilyCounters->counters.resize(*pCounterCount);
5204 for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];
5205
5206 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
5207}
5208
5209void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
5210 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
5211 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
5212 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5213 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
5214}
5215
5216void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
5217 VkResult result) {
5218 if (result == VK_SUCCESS) performance_lock_acquired = true;
5219}
5220
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005221void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
5222 performance_lock_acquired = false;
5223 for (auto &cmd_buffer : commandBufferMap) {
5224 cmd_buffer.second->performance_lock_released = true;
5225 }
5226}
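
// Releasing the profiling lock is recorded on every tracked command buffer: performance-query work
// recorded under the lock is no longer submittable once the lock is gone, and the per-buffer flag lets
// submit-time validation detect that.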
5227
locke-lunargd556cc32019-09-17 01:21:23 -06005228void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
5229 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5230 const VkAllocationCallbacks *pAllocator) {
5231 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005232 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5233 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005234 desc_template_map.erase(descriptorUpdateTemplate);
5235}
5236
5237void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
5238 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5239 const VkAllocationCallbacks *pAllocator) {
5240 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005241 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5242 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005243 desc_template_map.erase(descriptorUpdateTemplate);
5244}
5245
5246void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
5247 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
5248 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005249 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06005250 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
5251}
5252
5253void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
5254 VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
5255 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
5256 if (VK_SUCCESS != result) return;
5257 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5258}
5259
5260void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
5261 VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
5262 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
5263 if (VK_SUCCESS != result) return;
5264 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5265}
5266
5267void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
5268 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5269 const void *pData) {
5270 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
5271 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
5272 assert(0);
5273 } else {
5274 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
5275 // TODO: Record template push descriptor updates
5276 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
5277 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
5278 }
5279 }
5280}
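
// Illustrative application-side use of an update template that lands in the recorder above (a sketch;
// `device`, `set`, `template_handle`, and the packed struct layout are placeholders and must match the
// VkDescriptorUpdateTemplateEntry offsets/strides supplied at template creation):
//
//     struct Packed { VkDescriptorBufferInfo buffer_info; };
//     Packed data{{buffer, /*offset*/ 0, VK_WHOLE_SIZE}};
//     vkUpdateDescriptorSetWithTemplate(device, set, template_handle, &data);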
5281
5282void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
5283 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
5284 const void *pData) {
5285 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
5286}
5287
5288void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
5289 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5290 const void *pData) {
5291 RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
5292}
5293
5294void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
5295 VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
5296 const void *pData) {
5297 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5298
5299 const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5300 if (template_state) {
5301 auto layout_data = GetPipelineLayout(layout);
5302 auto dsl = GetDslFromPipelineLayout(layout_data, set);
5303 const auto &template_ci = template_state->create_info;
Jeff Bolz6ae39612019-10-11 20:57:36 -05005304 if (dsl && !dsl->destroyed) {
locke-lunargd556cc32019-09-17 01:21:23 -06005305 // Decode the template into a set of write updates
5306 cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
5307 dsl->GetDescriptorSetLayout());
5308 RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
5309 static_cast<uint32_t>(decoded_template.desc_writes.size()),
5310 decoded_template.desc_writes.data());
5311 }
5312 }
5313}
5314
5315void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
5316 uint32_t *pPropertyCount, void *pProperties) {
5317 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5318 if (*pPropertyCount) {
5319 if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
5320 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
Camden Stocker61050592019-11-27 12:03:09 -08005321 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005322 }
5323 physical_device_state->display_plane_property_count = *pPropertyCount;
5324 }
5325 if (pProperties) {
5326 if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
5327 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
Camden Stocker61050592019-11-27 12:03:09 -08005328 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005329 }
5330 }
5331}
5332
5333void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
5334 uint32_t *pPropertyCount,
5335 VkDisplayPlanePropertiesKHR *pProperties,
5336 VkResult result) {
5337 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5338 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5339}
5340
5341void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
5342 uint32_t *pPropertyCount,
5343 VkDisplayPlaneProperties2KHR *pProperties,
5344 VkResult result) {
5345 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5346 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5347}
5348
5349void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5350 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
5351 QueryObject query_obj = {queryPool, query, index};
5352 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5353 RecordCmdBeginQuery(cb_state, query_obj);
5354}
5355
5356void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5357 uint32_t query, uint32_t index) {
5358 QueryObject query_obj = {queryPool, query, index};
5359 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5360 RecordCmdEndQuery(cb_state, query_obj);
5361}
5362
5363void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
5364 VkSamplerYcbcrConversion ycbcr_conversion) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005365 auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();
5366
locke-lunargd556cc32019-09-17 01:21:23 -06005367 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005368 RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005369 }
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005370
5371 const VkFormat conversion_format = create_info->format;
5372
5373 if (conversion_format != VK_FORMAT_UNDEFINED) {
5374        // When format is VK_FORMAT_UNDEFINED, format_features is instead filled in by the AHB-specific path above
5375 ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
5376 }
5377
5378 ycbcr_state->chromaFilter = create_info->chromaFilter;
5379 ycbcr_state->format = conversion_format;
5380 samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005381}
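
// format_features feeds later sampler and image-view validation. For ordinary formats it is derived from
// the format's potential (tiling-independent) features; for Android external formats the create info
// carries VK_FORMAT_UNDEFINED and the ANDROID-specific path is responsible for filling the features in.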
5382
5383void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
5384 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5385 const VkAllocationCallbacks *pAllocator,
5386 VkSamplerYcbcrConversion *pYcbcrConversion,
5387 VkResult result) {
5388 if (VK_SUCCESS != result) return;
5389 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5390}
5391
5392void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
5393 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5394 const VkAllocationCallbacks *pAllocator,
5395 VkSamplerYcbcrConversion *pYcbcrConversion,
5396 VkResult result) {
5397 if (VK_SUCCESS != result) return;
5398 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5399}
5400
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005401void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
5402 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
5403 RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
5404 }
5405
5406 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
5407 ycbcr_state->destroyed = true;
5408 samplerYcbcrConversionMap.erase(ycbcr_conversion);
5409}
5410
locke-lunargd556cc32019-09-17 01:21:23 -06005411void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
5412 const VkAllocationCallbacks *pAllocator) {
5413 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005414 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005415}
5416
5417void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
5418 VkSamplerYcbcrConversion ycbcrConversion,
5419 const VkAllocationCallbacks *pAllocator) {
5420 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005421 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005422}
5423
Tony-LunarG977448c2019-12-02 14:52:02 -07005424void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5425 uint32_t queryCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005426 // Do nothing if the feature is not enabled.
Piers Daniell41b8c5d2020-01-10 15:42:00 -07005427 if (!enabled_features.core12.hostQueryReset) return;
locke-lunargd556cc32019-09-17 01:21:23 -06005428
5429 // Do nothing if the query pool has been destroyed.
5430 auto query_pool_state = GetQueryPoolState(queryPool);
5431 if (!query_pool_state) return;
5432
5433 // Reset the state of existing entries.
5434 QueryObject query_obj{queryPool, 0};
5435 const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
5436 for (uint32_t i = 0; i < max_query_count; ++i) {
5437 query_obj.query = firstQuery + i;
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02005438 queryToStateMap[query_obj] = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005439 if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
5440 for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02005441 query_obj.perf_pass = passIndex;
5442 queryToStateMap[query_obj] = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005443 }
5444 }
locke-lunargd556cc32019-09-17 01:21:23 -06005445 }
5446}
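
// Host query reset flips query slots back to QUERYSTATE_RESET without any command buffer involvement.
// Performance queries add a wrinkle handled above: one logical query owns a slot per counter pass, so
// each (query, pass) pair is reset individually. Illustrative host-side trigger (handles are placeholders):
//
//     vkResetQueryPool(device, query_pool, /*firstQuery*/ 0, /*queryCount*/ 4);  // core Vulkan 1.2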
5447
Tony-LunarG977448c2019-12-02 14:52:02 -07005448void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5449 uint32_t queryCount) {
5450 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
5451}
5452
5453void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
5454 uint32_t queryCount) {
5455 RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
5456}
5457
locke-lunargd556cc32019-09-17 01:21:23 -06005458void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
5459 const TEMPLATE_STATE *template_state, const void *pData) {
5460 // Translate the templated update into a normal update for validation...
5461 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
5462 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
5463 decoded_update.desc_writes.data(), 0, NULL);
5464}
5465
5466// Update the common AllocateDescriptorSetsData
5467void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005468 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005469 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005470 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005471 if (layout) {
5472 ds_data->layout_nodes[i] = layout;
5473 // Count total descriptors required per type
5474 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5475 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
5476 uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
5477 ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
5478 }
5479 }
5480        // Any unknown layouts will be flagged as errors during the ValidateAllocateDescriptorSets() call
5481 }
5482}
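
// Worked example for the accounting above (a hypothetical layout, not one from this file): a set layout
// with binding 0 = 2 uniform buffers and binding 1 = 4 combined image samplers adds 2 to
// required_descriptors_by_type[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] and 4 to
// required_descriptors_by_type[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER]; PerformAllocateDescriptorSets
// below then subtracts those totals from the pool's available counts.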
5483
5484// Decrement allocated sets from the pool and insert new sets into set_map
5485void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
5486 const VkDescriptorSet *descriptor_sets,
5487 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
5488 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
5489 // Account for sets and individual descriptors allocated from pool
5490 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
5491 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
5492 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
5493 }
5494
5495 const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
5496 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
5497
5498 // Create tracking object for each descriptor set; insert into global map and the pool's set.
5499 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
5500 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
5501
Jeff Bolz41a1ced2019-10-11 11:40:49 -05005502 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07005503 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06005504 pool_state->sets.insert(new_ds.get());
5505 new_ds->in_use.store(0);
5506 setMap[descriptor_sets[i]] = std::move(new_ds);
5507 }
5508}
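
// Note: the variable-count path is taken only when the VkDescriptorSetVariableDescriptorCountAllocateInfoEXT
// chain entry supplies exactly one count per allocated set; otherwise each set is created with a variable
// count of zero, which is how the spec treats an absent or zero-length count array.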
5509
5510// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
Jeremy Kniager05631e72020-06-08 14:21:35 -06005511void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
5512 VkPipelineBindPoint bind_point) {
5513 UpdateDrawState(cb_state, cmd_type, bind_point);
locke-lunargd556cc32019-09-17 01:21:23 -06005514 cb_state->hasDispatchCmd = true;
5515}
5516
locke-lunargd556cc32019-09-17 01:21:23 -06005517// Generic function to handle state update for all CmdDraw* type functions
Jeremy Kniager05631e72020-06-08 14:21:35 -06005518void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point) {
5519 UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point);
locke-lunargd556cc32019-09-17 01:21:23 -06005520 cb_state->hasDrawCmd = true;
5521}
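
// Note the chaining above: every draw funnels through the draw/dispatch helper first, so draws mark both
// hasDrawCmd and hasDispatchCmd on the command buffer, while pure dispatches mark only hasDispatchCmd.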
5522
5523void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5524 uint32_t firstVertex, uint32_t firstInstance) {
5525 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005526 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005527}
5528
5529void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5530 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5531 uint32_t firstInstance) {
5532 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005533 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005534}
5535
5536void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5537 uint32_t count, uint32_t stride) {
5538 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5539 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005540 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005541 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5542}
5543
5544void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5545 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5546 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5547 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005548 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005549 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5550}
5551
5552void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5553 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005554 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE);
locke-lunargd556cc32019-09-17 01:21:23 -06005555}
5556
5557void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5558 VkDeviceSize offset) {
5559 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005560 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE);
locke-lunargd556cc32019-09-17 01:21:23 -06005561 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5562 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5563}
5564
Tony-LunarG977448c2019-12-02 14:52:02 -07005565void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5566 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5567 uint32_t stride) {
5568 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5569 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5570 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005571 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS);
Tony-LunarG977448c2019-12-02 14:52:02 -07005572 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5573 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5574}
5575
locke-lunargd556cc32019-09-17 01:21:23 -06005576void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5577 VkDeviceSize offset, VkBuffer countBuffer,
5578 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5579 uint32_t stride) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005580 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
5581}
5582
5583void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5584 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5585 uint32_t maxDrawCount, uint32_t stride) {
5586 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
5587}
5588
5589void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5590 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5591 uint32_t maxDrawCount, uint32_t stride) {
locke-lunargd556cc32019-09-17 01:21:23 -06005592 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5593 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5594 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005595 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005596 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5597 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5598}
5599
5600void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5601 VkDeviceSize offset, VkBuffer countBuffer,
5602 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5603 uint32_t stride) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005604 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
5605}
5606
5607void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
5608 VkDeviceSize offset, VkBuffer countBuffer,
5609 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5610 uint32_t stride) {
5611 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
locke-lunargd556cc32019-09-17 01:21:23 -06005612}
5613
5614void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5615 uint32_t firstTask) {
5616 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005617 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005618}
5619
5620void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5621 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5622 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005623 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005624 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5625 if (buffer_state) {
5626 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5627 }
5628}
5629
5630void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5631 VkDeviceSize offset, VkBuffer countBuffer,
5632 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5633 uint32_t stride) {
5634 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5635 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5636 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
Jeremy Kniager05631e72020-06-08 14:21:35 -06005637 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS);
locke-lunargd556cc32019-09-17 01:21:23 -06005638 if (buffer_state) {
5639 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5640 }
5641 if (count_buffer_state) {
5642 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5643 }
5644}
5645
5646void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
5647 const VkAllocationCallbacks *pAllocator,
5648 VkShaderModule *pShaderModule, VkResult result,
5649 void *csm_state_data) {
5650 if (VK_SUCCESS != result) return;
5651 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
5652
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06005653 spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
locke-lunargd556cc32019-09-17 01:21:23 -06005654 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005655 auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
5656 csm_state->unique_shader_id)
5657 : std::make_shared<SHADER_MODULE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06005658 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
5659}
5660
5661void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005662 PIPELINE_STATE::StageState *stage_state) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005663 // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
5664 auto module = GetShaderModuleState(pStage->module);
5665 if (!module->has_valid_spirv) return;
5666
5667 // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
5668 auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
5669 if (entrypoint == module->end()) return;
5670
locke-lunarg654e3692020-06-04 17:19:15 -06005671 stage_state->stage_flag = pStage->stage;
5672
locke-lunargd556cc32019-09-17 01:21:23 -06005673 // Mark accessible ids
5674 stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
5675 ProcessExecutionModes(module, entrypoint, pipeline);
5676
5677 stage_state->descriptor_uses =
Mark Lobodzinskid8d658e2020-01-30 15:05:51 -07005678 CollectInterfaceByDescriptorSlot(module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
locke-lunargd556cc32019-09-17 01:21:23 -06005679 // Capture descriptor uses for the pipeline
5680 for (auto use : stage_state->descriptor_uses) {
5681 // While validating shaders capture which slots are used by the pipeline
John Zulauf649edd52019-10-02 14:39:41 -06005682 const uint32_t slot = use.first.first;
5683 auto &reqs = pipeline->active_slots[slot][use.first.second];
locke-lunargd556cc32019-09-17 01:21:23 -06005684 reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
John Zulauf649edd52019-10-02 14:39:41 -06005685 pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
locke-lunargd556cc32019-09-17 01:21:23 -06005686 }
locke-lunarg96dc9632020-06-10 17:22:18 -06005687
5688 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
5689 pipeline->fragmentShader_writable_output_location_list = CollectWritableOutputLocationinFS(*module, *pStage);
5690 }
locke-lunargd556cc32019-09-17 01:21:23 -06005691}
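
// active_slots is keyed as active_slots[set][binding] -> descriptor_req and is accumulated across all of
// a pipeline's stages; max_active_slot lets descriptor-set compatibility checks stop at the highest set
// any shader in the pipeline actually uses.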
5692
5693void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
5694 if (cb_state == nullptr) {
5695 return;
5696 }
5697
5698 const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
5699 if (pipeline_layout_state == nullptr) {
5700 return;
5701 }
5702
5703 if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
5704 cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
5705 cb_state->push_constant_data.clear();
5706 uint32_t size_needed = 0;
5707 for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
5708 size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
5709 }
5710 cb_state->push_constant_data.resize(size_needed, 0);
5711 }
5712}
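
// Sizing example for the loop above (hypothetical ranges, not taken from a real pipeline layout): ranges
// {offset 0, size 16} and {offset 48, size 16} give size_needed = max(16, 64) = 64, so push_constant_data
// becomes a 64-byte zero-filled shadow buffer for subsequent push-constant recording to be checked against.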
John Zulauf22b0fbe2019-10-15 06:26:16 -06005713
5714void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
5715 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
5716 VkResult result) {
5717 if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
5718 auto swapchain_state = GetSwapchainState(swapchain);
5719
5720 if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);
5721
5722 if (pSwapchainImages) {
5723 if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
5724 swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
5725 }
5726 for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
locke-lunargb3584732019-10-28 20:18:36 -06005727 if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue; // Already retrieved this.
John Zulauf22b0fbe2019-10-15 06:26:16 -06005728
5729 // Add imageMap entries for each swapchain image
5730 VkImageCreateInfo image_ci;
5731 image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -06005732 image_ci.pNext = nullptr; // to be set later
5733 image_ci.flags = 0; // to be updated below
John Zulauf22b0fbe2019-10-15 06:26:16 -06005734 image_ci.imageType = VK_IMAGE_TYPE_2D;
5735 image_ci.format = swapchain_state->createInfo.imageFormat;
5736 image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
5737 image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
5738 image_ci.extent.depth = 1;
5739 image_ci.mipLevels = 1;
5740 image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
5741 image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
5742 image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
5743 image_ci.usage = swapchain_state->createInfo.imageUsage;
5744 image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
5745 image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
5746 image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
5747 image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
5748
5749 image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);
5750
5751 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
5752 image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
5753 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
5754 image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
5755 if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
5756 image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);
5757
locke-lunarg296a3c92020-03-25 01:04:29 -06005758 imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005759 auto &image_state = imageMap[pSwapchainImages[i]];
5760 image_state->valid = false;
5761 image_state->create_from_swapchain = swapchain;
5762 image_state->bind_swapchain = swapchain;
5763 image_state->bind_swapchain_imageIndex = i;
Tony-LunarGe64e4fe2020-02-17 16:21:55 -07005764 image_state->is_swapchain_image = true;
locke-lunargb3584732019-10-28 20:18:36 -06005765 swapchain_state->images[i].image = pSwapchainImages[i];
5766 swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
Petr Kraus44f1c482020-04-25 20:09:25 +02005767
5768 AddImageStateProps(*image_state, device, physical_device);
John Zulauf22b0fbe2019-10-15 06:26:16 -06005769 }
5770 }
5771
5772 if (*pSwapchainImageCount) {
5773 if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
5774 swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
5775 }
5776 swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
5777 }
5778}
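
// Swapchain images are never created through vkCreateImage, so the code above synthesizes a
// VkImageCreateInfo from the swapchain's own create info (format, extent, usage, sharing mode) and
// translates the swapchain create flags into their image-create equivalents, giving these images the
// same IMAGE_STATE shape as ordinary ones.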
sourav parmar35e7a002020-06-09 17:58:44 -07005779
5780void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureKHR(
5781 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
5782 const VkAccelerationStructureBuildOffsetInfoKHR *const *ppOffsetInfos) {
5783 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5784 if (cb_state == nullptr) {
5785 return;
5786 }
5787 for (uint32_t i = 0; i < infoCount; ++i) {
5788 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfos[i].dstAccelerationStructure);
5789 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfos[i].srcAccelerationStructure);
5790 if (dst_as_state != nullptr) {
5791 dst_as_state->built = true;
5792 dst_as_state->build_info_khr.initialize(pInfos);
5793 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
5794 }
5795 if (src_as_state != nullptr) {
5796 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
5797 }
5798 }
5799 cb_state->hasBuildAccelerationStructureCmd = true;
5800}
5801
5802void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
5803 const VkCopyAccelerationStructureInfoKHR *pInfo) {
5804 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5805 if (cb_state) {
5806 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfo->src);
5807 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfo->dst);
5808 if (dst_as_state != nullptr && src_as_state != nullptr) {
5809 dst_as_state->built = true;
5810 dst_as_state->build_info_khr = src_as_state->build_info_khr;
5811 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
5812 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
5813 }
5814 }
5815}
Piers Daniell39842ee2020-07-10 16:42:33 -06005816
5817void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
5818 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5819 cb_state->status |= CBSTATUS_CULL_MODE_SET;
5820 cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
5821}
5822
5823void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
5824 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5825 cb_state->status |= CBSTATUS_FRONT_FACE_SET;
5826 cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
5827}
5828
5829void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
5830 VkPrimitiveTopology primitiveTopology) {
5831 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5832 cb_state->primitiveTopology = primitiveTopology;
5833 cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5834 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
5835}
5836
5837void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
5838 const VkViewport *pViewports) {
5839 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5840 cb_state->viewportWithCountMask |= (1u << viewportCount) - 1u;
5841 cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
5842 cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
5843}
5844
5845void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
5846 const VkRect2D *pScissors) {
5847 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5848 cb_state->scissorWithCountMask |= (1u << scissorCount) - 1u;
5849 cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
5850 cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
5851}
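
// The viewport/scissor masks above rely on (1u << n) - 1u setting the low n bits: e.g. a viewportCount
// of 3 yields 0b111, recording that viewports 0..2 have been supplied dynamically.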
5852
5853void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
5854 uint32_t bindingCount, const VkBuffer *pBuffers,
5855 const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
5856 const VkDeviceSize *pStrides) {
5857 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5858 if (pStrides) {
5859 cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
5860 cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
5861 }
5862
5863 uint32_t end = firstBinding + bindingCount;
5864 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
5865 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
5866 }
5867
5868 for (uint32_t i = 0; i < bindingCount; ++i) {
5869 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
5870 vertex_buffer_binding.buffer = pBuffers[i];
5871 vertex_buffer_binding.offset = pOffsets[i];
5872 vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
5873 vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
5874        // Add a binding for this vertex buffer to this command buffer
5875 if (pBuffers[i]) {
5876 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
5877 }
5878 }
5879}
5880
5881void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
5882 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5883 cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
5884 cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
5885}
5886
5887void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
5888 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5889 cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5890 cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
5891}
5892
5893void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
5894 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5895 cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
5896 cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
5897}
5898
5899void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
5900 VkBool32 depthBoundsTestEnable) {
5901 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5902 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5903 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
5904}

5905void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
5906 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5907 cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
5908 cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
5909}
5910
5911void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
5912 VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
5913 VkCompareOp compareOp) {
5914 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5915 cb_state->status |= CBSTATUS_STENCIL_OP_SET;
5916 cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
5917}