/* Copyright (c) 2015-2020 The Khronos Group Inc.
 * Copyright (c) 2015-2020 Valve Corporation
 * Copyright (c) 2015-2020 LunarG, Inc.
 * Copyright (C) 2015-2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}

uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
    // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
    uint32_t mip_level_count = range->levelCount;
    if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
        mip_level_count = mip_levels - range->baseMipLevel;
    }
    return mip_level_count;
}

uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
    // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
    uint32_t array_layer_count = range->layerCount;
    if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
        array_layer_count = layers - range->baseArrayLayer;
    }
    return array_layer_count;
}
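
// Worked example (values are illustrative, not from any caller in this file): for an image created with
// mipLevels = 10 and arrayLayers = 6, a range using the VK_REMAINING_* sentinels resolves like so:
//
//     VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 4, VK_REMAINING_MIP_LEVELS, 2, VK_REMAINING_ARRAY_LAYERS};
//     uint32_t levels = ResolveRemainingLevels(&range, 10);  // 10 - 4 == 6
//     uint32_t layers = ResolveRemainingLayers(&range, 6);   // 6 - 2 == 4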

VkImageSubresourceRange NormalizeSubresourceRange(const VkImageCreateInfo &image_create_info,
                                                  const VkImageSubresourceRange &range) {
    VkImageSubresourceRange norm = range;
    norm.levelCount = ResolveRemainingLevels(&range, image_create_info.mipLevels);

    // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR flag bit, where <extent.depth> and
    // <arrayLayers> can potentially alias.
    uint32_t layer_limit = (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR))
                               ? image_create_info.extent.depth
                               : image_create_info.arrayLayers;
    norm.layerCount = ResolveRemainingLayers(&range, layer_limit);

    // For multiplanar formats, IMAGE_ASPECT_COLOR is equivalent to adding the aspect of the individual planes
    VkImageAspectFlags &aspect_mask = norm.aspectMask;
    if (FormatIsMultiplane(image_create_info.format)) {
        if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
            aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
            aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
            if (FormatPlaneCount(image_create_info.format) > 2) {
                aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
            }
        }
    }
    return norm;
}

VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range) {
    const VkImageCreateInfo &image_create_info = image_state.createInfo;
    return NormalizeSubresourceRange(image_create_info, range);
}
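
// Usage sketch (hypothetical caller): for a two-plane format such as VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
// a COLOR aspect is rewritten to the explicit per-plane aspects while the sentinels are resolved:
//
//     VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
//     VkImageSubresourceRange norm = NormalizeSubresourceRange(*image_state, range);
//     // norm.levelCount == createInfo.mipLevels
//     // norm.layerCount == createInfo.arrayLayers (or extent.depth for 2D-array-compatible 3D images)
//     // norm.aspectMask == VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT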

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->external_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            is_node->format_features = it->second;
        }
    }
}

void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
    const VkExternalMemoryBufferCreateInfo *embci = lvl_find_in_chain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
    if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        bs_node->external_ahb = true;
    }
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            ycbcr_state->format_features = it->second;
        }
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
    if (ahb_format_props) {
        ahb_ext_formats_map.insert({ahb_format_props->externalFormat, ahb_format_props->formatFeatures});
    }
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
                                                                                     uint32_t set) {
    std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
    if (layout_data && (set < layout_data->set_layouts.size())) {
        dsl = layout_data->set_layouts[set];
    }
    return dsl;
}
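
// Usage sketch (hypothetical caller): the function returns an empty shared_ptr rather than asserting when
// 'set' is out of range, so callers can test the result directly:
//
//     const auto dsl = GetDslFromPipelineLayout(pipeline_layout_state, set_index);
//     if (dsl) {
//         // inspect the descriptor set layout
//     }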

void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
    // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
    // if format is AHB external format then the features are already set
    if (image_state.has_ahb_format == false) {
        const VkImageTiling image_tiling = image_state.createInfo.tiling;
        const VkFormat image_format = image_state.createInfo.format;
        if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
            VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
                VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
            DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image, &drm_format_properties);

            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
            drm_properties.resize(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);

            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
                    drm_format_properties.drmFormatModifier) {
                    image_state.format_features =
                        drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                    break;
                }
            }
        } else {
            VkFormatProperties format_properties;
            DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
            image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                   : format_properties.optimalTilingFeatures;
        }
    }
}
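
// The DRM branch above uses the standard Vulkan "count, allocate, fill" enumeration idiom. The same pattern
// in isolation (hypothetical standalone code, error handling omitted):
//
//     VkDrmFormatModifierPropertiesListEXT list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT, nullptr};
//     VkFormatProperties2 props2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, &list};
//     vkGetPhysicalDeviceFormatProperties2(gpu, format, &props2);  // first call fills list.drmFormatModifierCount
//     std::vector<VkDrmFormatModifierPropertiesEXT> mods(list.drmFormatModifierCount);
//     list.pDrmFormatModifierProperties = mods.data();
//     vkGetPhysicalDeviceFormatProperties2(gpu, format, &props2);  // second call fills the array itself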

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes every multiplanar format has at least one plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    AddImageStateProps(*is_node, device, physical_device);

    is_node->unprotected = ((pCreateInfo->flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);

    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image, mem_binding);
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
    }
    // Record the memory requirements in case the app never queries them
    // External AHB memory can't be queried until after memory is bound
    if (buffer_state->external_ahb == false) {
        DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);
    }

    buffer_state->unprotected = ((pCreateInfo->flags & VK_BUFFER_CREATE_PROTECTED_BIT) == 0);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);

    // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
    const VkImageTiling image_tiling = image_state->createInfo.tiling;
    const VkFormat image_view_format = pCreateInfo->format;
    if (image_state->has_ahb_format == true) {
        // The ImageView shares the Image's format features since both use the same AHB
        image_view_state->format_features = image_state->format_features;
    } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
        assert(device_extensions.vk_ext_image_drm_format_modifier);
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                       nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;
        // Allocate the output array before the second query; the first call only fills drmFormatModifierCount,
        // and reading pDrmFormatModifierProperties while it is still null would be invalid
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
        std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties(drm_properties_list.drmFormatModifierCount);
        drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
                image_view_state->format_features |=
                    drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                break;
            }
        }
    } else {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
        image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                     : format_properties.optimalTilingFeatures;
    }

    imageViewMap.insert(std::make_pair(*pView, std::move(image_view_state)));
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        RemoveBufferMemoryRange(buffer, mem_binding);
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(CMD_BUFFER_STATE *cb, FRAMEBUFFER_STATE *framebuffer,
                                                                      uint32_t index) {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
        assert(index < cb->imagelessFramebufferAttachments.size());
        return cb->imagelessFramebufferAttachments[index];
    }
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const CMD_BUFFER_STATE *cb,
                                                                            const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
        assert(index < cb->imagelessFramebufferAttachments.size());
        return cb->imagelessFramebufferAttachments[index];
    }
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        if (image_state->binding.mem_state) {
            bound_images = &image_state->binding.mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // This is a one-way clear. Because bound_images contains cross references, clearing every set in the loop
    // dismantles the whole aliasing web; a two-way erase is not needed.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}
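
// Sketch of the resulting bookkeeping (handles A and B are hypothetical): if images A and B are bound to the
// same memory and IsCompatibleAliasing() holds, AddAliasingImage() leaves the two sets cross-referencing:
//
//     GetImageState(A)->aliasing_images == {B}
//     GetImageState(B)->aliasing_images == {A}
//
// RemoveAliasingImage(A) then erases A from B's set and clears A's own set.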

const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}
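
// The template above lets the const and non-const GetObjectMemBinding overloads share one switch statement.
// The same idiom in miniature (names here are illustrative only, not from this file):
//
//     template <typename Self, typename Result>
//     static Result FindImpl(Self self, int key) { return self->Lookup(key); }
//
//     const Item *Registry::Find(int key) const { return FindImpl<const Registry *, const Item *>(this, key); }
//     Item *Registry::Find(int key) { return FindImpl<Registry *, Item *>(this, key); }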

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo, fake_address);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }

    // Assumes validation has already guaranteed at most a single import operation in the pNext chain
#ifdef VK_USE_PLATFORM_WIN32_KHR
    auto win32_import = lvl_find_in_chain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
    if (win32_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = win32_import->handleType;
    }
#endif
    auto fd_import = lvl_find_in_chain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
    if (fd_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = fd_import->handleType;
    }
    auto host_pointer_import = lvl_find_in_chain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
    if (host_pointer_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = host_pointer_import->handleType;
    }
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    // AHB import doesn't have a handle in the pNext struct
    // It should be assumed that an imported AHB can only have the single AHB handleType
    auto ahb_import = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
    if ((ahb_import) && (ahb_import->buffer != nullptr)) {
        mem_info->is_import_ahb = true;
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
#endif

    const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    mem_info->unprotected = ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) == 0);
}
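
// The pNext inspection above relies on lvl_find_in_chain<T>(), which walks the chain comparing each node's
// sType against the one registered for T. A rough sketch of that walk, assuming the usual VkBaseInStructure
// layout (this is not the actual implementation):
//
//     const VkBaseInStructure *node = reinterpret_cast<const VkBaseInStructure *>(pAllocateInfo->pNext);
//     while (node && node->sType != VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO) node = node->pNext;
//     // node now points at the VkMemoryDedicatedAllocateInfo, or is null if the chain has none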

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(mem_binding->cb_bindings,
                                        VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            mem_binding->binding.size = mem_binding->requirements.size;
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}

// For the NULL mem case, clear any previous binding; otherwise:
// Make sure given object is in its object map
// IF a previous binding existed, update binding
// Add reference from objectInfo to memoryInfo
// Add reference off of object's binding info
// Return VK_TRUE if addition is successful, VK_FALSE otherwise
bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
                                                 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
    bool skip = VK_FALSE;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
            if (binding.mem_state) {
                binding.mem_state->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}
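
// Hedged sketch of how the vkQueueBindSparse handlers are expected to call this, once per VkSparseMemoryBind
// (field names follow the Vulkan structs; the surrounding loop is assumed, not shown in this file):
//
//     // for each VkSparseMemoryBind 'bind' of image 'img':
//     SetSparseMemBinding(bind.memory, bind.memoryOffset, bind.size,
//                         VulkanTypedHandle(img, kVulkanObjectTypeImage));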

void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[setIndex].validated_set != descriptor_set ||
                state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                              state.per_set[setIndex].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                        state.per_set[setIndex].validated_set_binding_req_map.end(),
                                        std::inserter(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, delta_reqs);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, binding_req_map);
                }

                state.per_set[setIndex].validated_set = descriptor_set;
                state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}
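
// The need_update test above is sorted-range set algebra: std::includes asks "is the new request map already
// covered by what was validated?", and std::set_difference extracts only the uncovered remainder. In
// miniature (illustrative values; std::map iterates in sorted order, as both algorithms require):
//
//     std::map<int, char> validated = {{1, 'a'}, {2, 'b'}};
//     std::map<int, char> requested = {{1, 'a'}, {2, 'b'}, {3, 'c'}};
//     bool covered = std::includes(validated.begin(), validated.end(),
//                                  requested.begin(), requested.end());  // false: {3, 'c'} is new
//     std::map<int, char> delta;
//     std::set_difference(requested.begin(), requested.end(), validated.begin(), validated.end(),
//                         std::inserter(delta, delta.begin()));  // delta == {{3, 'c'}}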

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Gets the union of all features defined by Potential Format Features
// Note: this does not handle the external format (AHB) case, since that can only be used with sampled images
VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
    VkFormatFeatureFlags format_features = 0;

    if (format != VK_FORMAT_UNDEFINED) {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
        format_features |= format_properties.linearTilingFeatures;
        format_features |= format_properties.optimalTilingFeatures;
        if (device_extensions.vk_ext_image_drm_format_modifier) {
            // VK_KHR_get_physical_device_properties2 is required in this case
            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            // Query the modifier count first, then allocate the output array before the second query; reading
            // pDrmFormatModifierProperties while it is still null would be invalid
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
            }
        }
    }

    return format_features;
}
1088
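// Usage sketch for GetPotentialFormatFeatures (hypothetical caller, for illustration): because
// the result is the union of linear, optimal, and DRM-modifier tiling features, it suits
// "could this format ever support X" checks rather than tiling-specific ones:
//
//   VkFormatFeatureFlags potential = GetPotentialFormatFeatures(create_info.format);
//   if (!(potential & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) { /* report */ }
//
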
// Tie the VulkanTypedHandle to the cmd buffer, which includes:
//  - adding the object to the cmd buffer's object_bindings
//  - adding the cmd buffer to the object's cb_bindings
bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled[command_buffer_state]) {
        return false;
    }
    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
    auto inserted = cb_bindings.insert({cb_node, -1});
    if (inserted.second) {
        cb_node->object_bindings.push_back(obj);
        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
        return true;
    }
    return false;
}

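// Design note: cb_bindings maps each bound command buffer to the index of the matching entry in
// that command buffer's object_bindings vector, so unbinding is O(1) instead of a vector search.
// Sketch of the inverse operation (hypothetical, for illustration):
//
//   auto it = obj_state->cb_bindings.find(cb_node);                  // cb -> index
//   if (it != obj_state->cb_bindings.end() && it->second >= 0) {
//       cb_node->object_bindings[it->second] = VulkanTypedHandle();  // leaves a zeroed slot
//   }
//
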
// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
//   Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        pCB->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        pCB->small_indexed_draw_call_count = 0;

        pCB->transform_feedback_active = false;
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

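// The command_buffer_reset_callback invoked above lets derived validation objects hook command
// buffer resets without overriding this function. A minimal sketch, assuming a std::function
// style setter exists on the tracker (names illustrative, not a guarantee of the exact API):
//
//   state_tracker->SetCommandBufferResetCallback(
//       [](VkCommandBuffer cb) { /* drop any per-CB caches keyed by cb */ });
//
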
void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);
        }
    }

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

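    // The lookup above mirrors the Vulkan rule that core features may arrive either through
    // pEnabledFeatures or through a VkPhysicalDeviceFeatures2 chained into pNext, but not both.
    // Sketch of the app-side pattern being decoded (illustrative only):
    //
    //   VkPhysicalDeviceFeatures2 features2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2};
    //   features2.features.samplerAnisotropy = VK_TRUE;
    //   VkDeviceCreateInfo ci = {VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO};
    //   ci.pNext = &features2;  // pEnabledFeatures must then be nullptr
    //
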
    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously retrieved them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
    if (vulkan_12_features) {
        state_tracker->enabled_features.core12 = *vulkan_12_features;
    } else {
        // Set Extension Feature Aliases to false as there is no struct to check
        state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
        state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
        state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
        state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;

        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan12Features

        const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
        if (eight_bit_storage_features) {
            state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
            state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
                eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
            state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
        }

        const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
        if (float16_int8_features) {
            state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
            state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
        }

        const auto *descriptor_indexing_features =
            lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
        if (descriptor_indexing_features) {
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
                descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
            state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
                descriptor_indexing_features->descriptorBindingPartiallyBound;
            state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
                descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
            state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
        }

        const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
        if (scalar_block_layout_features) {
            state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
        }

        const auto *imageless_framebuffer_features =
            lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
        if (imageless_framebuffer_features) {
            state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
        }

        const auto *uniform_buffer_standard_layout_features =
            lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
        if (uniform_buffer_standard_layout_features) {
            state_tracker->enabled_features.core12.uniformBufferStandardLayout =
                uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
        }

        const auto *subgroup_extended_types_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
        if (subgroup_extended_types_features) {
            state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
                subgroup_extended_types_features->shaderSubgroupExtendedTypes;
        }

        const auto *separate_depth_stencil_layouts_features =
            lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
        if (separate_depth_stencil_layouts_features) {
            state_tracker->enabled_features.core12.separateDepthStencilLayouts =
                separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
        }

        const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
        if (host_query_reset_features) {
            state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
        }

        const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
        if (timeline_semaphore_features) {
            state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
        }

        const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
        if (buffer_device_address) {
            state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
            state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
                buffer_device_address->bufferDeviceAddressCaptureReplay;
            state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
                buffer_device_address->bufferDeviceAddressMultiDevice;
        }
    }

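    // Design note: downstream checks only ever read enabled_features.core12, so the block above
    // normalizes both ways an app can enable 1.2-era features (the aggregate
    // VkPhysicalDeviceVulkan12Features struct or the individual promoted feature structs) into
    // one place, e.g.:
    //
    //   if (enabled_features.core12.descriptorBindingPartiallyBound) { /* ... */ }
    //
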
    const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
    if (vulkan_11_features) {
        state_tracker->enabled_features.core11 = *vulkan_11_features;
    } else {
        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan11Features

        const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
        if (sixteen_bit_storage_features) {
            state_tracker->enabled_features.core11.storageBuffer16BitAccess =
                sixteen_bit_storage_features->storageBuffer16BitAccess;
            state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
                sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
            state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
            state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
        }

        const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
        if (multiview_features) {
            state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
            state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
            state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
        }

        const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
        if (variable_pointers_features) {
            state_tracker->enabled_features.core11.variablePointersStorageBuffer =
                variable_pointers_features->variablePointersStorageBuffer;
            state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
        }

        const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
        if (protected_memory_features) {
            state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
        }

        const auto *ycbcr_conversion_features =
            lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
        if (ycbcr_conversion_features) {
            state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
        }

        const auto *shader_draw_parameters_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
        if (shader_draw_parameters_features) {
            state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
        }
    }

    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

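    // physical_device_count defaults to 1 when no VkDeviceGroupDeviceCreateInfo is chained, so
    // device-group-aware checks (e.g. deviceMask validation) can read it unconditionally.
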
    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_device_address_ext) {
        state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    const auto *dedicated_allocation_image_aliasing_features =
        lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
    if (dedicated_allocation_image_aliasing_features) {
        state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
            *dedicated_allocation_image_aliasing_features;
    }

    const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
    if (performance_query_features) {
        state_tracker->enabled_features.performance_query_features = *performance_query_features;
    }

    const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
    if (device_coherent_memory_features) {
        state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
    }

    const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
    if (ycbcr_image_array_features) {
        state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
    }

    const auto *ray_tracing_features = lvl_find_in_chain<VkPhysicalDeviceRayTracingFeaturesKHR>(pCreateInfo->pNext);
    if (ray_tracing_features) {
        state_tracker->enabled_features.ray_tracing_features = *ray_tracing_features;
    }

    const auto *robustness2_features = lvl_find_in_chain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
    if (robustness2_features) {
        state_tracker->enabled_features.robustness2_features = *robustness2_features;
    }

    const auto *fragment_density_map_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_density_map_features) {
        state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
    }

    const auto *astc_decode_features = lvl_find_in_chain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
    if (astc_decode_features) {
        state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
    }

    const auto *custom_border_color_features = lvl_find_in_chain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
    if (custom_border_color_features) {
        state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
    }

    const auto *pipeline_creation_cache_control_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
    if (pipeline_creation_cache_control_features) {
        state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
    }

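    // All of the probes above share one pattern: walk pCreateInfo->pNext once per struct type
    // and cache a copy when present. A simplified sketch of what an lvl_find_in_chain-style
    // helper does (the real helper maps the template type to its sType enum):
    //
    //   const VkBaseInStructure *current = reinterpret_cast<const VkBaseInStructure *>(pNext);
    //   while (current && current->sType != wanted_stype) current = current->pNext;
    //   return reinterpret_cast<const WantedStruct *>(current);
    //
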
    // Store physical device properties and physical device mem limits into CoreChecks structs
    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core11);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core12);

    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
        VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
        state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
            descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
        state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
            descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
        state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
        state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
            descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
    }

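    // Backfill pattern: when the device is pre-1.2 but exposes the promoted extension, the
    // extension's property struct is queried and copied field-by-field into phys_dev_props_core12
    // so later checks can read the core12 struct unconditionally. The same pattern repeats below
    // for VK_KHR_depth_stencil_resolve, VK_KHR_timeline_semaphore, and VK_KHR_shader_float_controls.
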
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
        VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
        state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
        state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
            depth_stencil_resolve_props.supportedStencilResolveModes;
        state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
        state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing, &phys_dev_props->ray_tracing_propsKHR);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
        VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
        state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
            timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
    }

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
        VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
        state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
        state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
    }

    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
                                                                               state_tracker->cooperative_matrix_properties.data());
    }
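
    // The two GetPhysicalDeviceCooperativeMatrixPropertiesNV calls above are the standard Vulkan
    // enumerate pattern: query the count with a null array pointer, size the storage, then call
    // again to fill it. Sketch:
    //
    //   uint32_t count = 0;
    //   GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &count, nullptr);      // count only
    //   props.resize(count);
    //   GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &count, props.data()); // fill
    //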
    if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
        state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
        state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
        state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
    }

    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
            state_tracker->queue_family_index_map.insert(
                std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.queueCount));
            state_tracker->queue_family_create_flags_map.insert(
                std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.flags));
        }
    }
}
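
// queue_family_index_map (family -> queueCount) and queue_family_create_flags_map
// (family -> VkDeviceQueueCreateFlags) record what the app actually requested at device
// creation, so later queue retrieval and usage can be checked against those requests.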

void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}

// Loop through bound objects and increment their in_use counts.
void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
    for (auto obj : cb_node->object_bindings) {
        auto base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_add(1);
        }
    }
}

// Track which resources are in-flight by atomically incrementing their "in_use" count
void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
    cb_node->submitCount++;
    cb_node->in_use.fetch_add(1);

    // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
    IncrementBoundObjects(cb_node);
    // TODO : We should be able to remove the NULL look-up checks from the code below as long as
    //  all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
    //  should then be flagged prior to calling this function
    for (auto event : cb_node->writeEventsBeforeWait) {
        auto event_state = GetEventState(event);
        if (event_state) event_state->write_in_use++;
    }
}

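// in_use is an atomic reference count of pending queue work: IncrementResources raises it at
// submit time and RetireWorkOnQueue (below) lowers it as submissions complete. Anything with a
// nonzero count is "in flight". Illustrative check (hypothetical caller):
//
//   if (cb_state->in_use.load()) { /* freeing this command buffer now would be an error */ }
//
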
// Decrement in-use count for objects bound to command buffer
void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
    BASE_NODE *base_obj = nullptr;
    for (auto obj : cb_node->object_bindings) {
        base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_sub(1);
        }
    }
}

void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        for (auto &signal : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(signal.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
                if (pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && pSemaphore->payload < signal.payload) {
                    pSemaphore->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            QueryMap localQueryToStateMap;
            VkQueryPool first_pool = VK_NULL_HANDLE;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &localQueryToStateMap);
            }

            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
}

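// Queue forward-progress model: each QUEUE_STATE keeps a monotonically increasing seq plus a
// deque of pending submissions, and RetireWorkOnQueue processes submissions until seq reaches
// the requested value. Cross-queue semaphore waits are folded into otherQueueSeqs so retiring
// one queue can transitively retire its producers, e.g.:
//
//   queue A, submit #5 signals semaphore S      ->  recorded as wait{queue = A, seq = 5} on B
//   queue B retires the submit that waited on S ->  A is rolled forward through seq 5 as well
//
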
// Submit a fence to a queue, delimiting previous fences and previous untracked
// work by it.
static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
    pFence->state = FENCE_INFLIGHT;
    pFence->signaler.first = pQueue->queue;
    pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
}

void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        vector<SEMAPHORE_WAIT> semaphore_waits;
        vector<SEMAPHORE_SIGNAL> semaphore_signals;
        vector<VkSemaphore> semaphore_externals;
        const uint64_t next_seq = pQueue->seq + pQueue->submissions.size() + 1;
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_WAIT wait;
                    wait.semaphore = semaphore;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                            wait.queue = pSemaphore->signaler.first;
                            wait.seq = pSemaphore->signaler.second;
                            semaphore_waits.push_back(wait);
                            pSemaphore->in_use.fetch_add(1);
                        }
                        pSemaphore->signaler.first = VK_NULL_HANDLE;
                        pSemaphore->signaled = false;
                    } else if (pSemaphore->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
                        wait.queue = queue;
                        wait.seq = next_seq;
                        wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
                        semaphore_waits.push_back(wait);
                        pSemaphore->in_use.fetch_add(1);
                    }
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
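        // Binary vs. timeline wait semantics above: a binary wait consumes the pending signal
        // (signaler cleared, signaled = false) and chains this submit to the signaling queue's
        // sequence number. A timeline wait is only recorded if the target payload has not been
        // reached yet; it is tied to this queue's own next_seq with the payload carried along so
        // RetireWorkOnQueue can resolve it later.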
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = next_seq;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = next_seq;
                        pSemaphore->signaled = true;
                    } else {
                        signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, next_seq);
                }
            }
        }
        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                VkQueryPool first_pool = VK_NULL_HANDLE;
                EventToStageMap localEventToStageMap;
                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, first_pool, perf_pass, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, perf_pass);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

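// Deferred-update note: query and event transitions recorded into command buffers are stored as
// lambdas (queryUpdates / eventUpdates) and replayed above at submit time into per-submit local
// maps before being committed to the global queryToStateMap / eventMap. This lets one recorded
// command buffer be submitted many times while keeping global state consistent per submission.
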
void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS == result) {
        AddMemObjInfo(device, *pMemory, pAllocateInfo);
    }
    return;
}

1995void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
1996 if (!mem) return;
1997 DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
1998 const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
1999
2000 // Clear mem binding for any bound objects
2001 for (const auto &obj : mem_info->obj_bindings) {
2002 BINDABLE *bindable_state = nullptr;
2003 switch (obj.type) {
2004 case kVulkanObjectTypeImage:
2005 bindable_state = GetImageState(obj.Cast<VkImage>());
2006 break;
2007 case kVulkanObjectTypeBuffer:
2008 bindable_state = GetBufferState(obj.Cast<VkBuffer>());
2009 break;
2010 case kVulkanObjectTypeAccelerationStructureNV:
2011 bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
2012 break;
2013
2014 default:
2015 // Should only have acceleration structure, buffer, or image objects bound to memory
2016 assert(0);
2017 }
2018
2019 if (bindable_state) {
Jeff Bolz41e29052020-03-29 22:33:55 -05002020 // Remove any sparse bindings bound to the resource that use this memory.
 2021            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
 2022                if (it->mem_state.get() == mem_info) {
 2023                    it = bindable_state->sparse_bindings.erase(it);
 2024                } else {
 2025                    ++it;
 2026                }
 2027            }
locke-lunargd556cc32019-09-17 01:21:23 -06002032 bindable_state->UpdateBoundMemorySet();
2033 }
2034 }
2035 // Any bound cmd buffers are now invalid
2036 InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
2037 RemoveAliasingImages(mem_info->bound_images);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002038 mem_info->destroyed = true;
John Zulauf79952712020-04-07 11:25:54 -06002039 fake_memory.Free(mem_info->fake_base_address);
locke-lunargd556cc32019-09-17 01:21:23 -06002040 memObjMap.erase(mem);
2041}
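// Usage sketch (hypothetical handle): the free recorded above. Note the cleanup runs in
// the PreCallRecord phase, so bound images/buffers are unlinked before the handle is
// actually destroyed downstream.
//
//     vkFreeMemory(device, memory, nullptr);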
2042
2043void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
2044 VkFence fence, VkResult result) {
2045 if (result != VK_SUCCESS) return;
2046 uint64_t early_retire_seq = 0;
2047 auto pFence = GetFenceState(fence);
2048 auto pQueue = GetQueueState(queue);
2049
2050 if (pFence) {
2051 if (pFence->scope == kSyncScopeInternal) {
2052 SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
2053 if (!bindInfoCount) {
2054 // No work to do, just dropping a fence in the queue by itself.
2055 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002056 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
locke-lunargd556cc32019-09-17 01:21:23 -06002057 }
2058 } else {
 2059            // Retire work up to this fence early; we will not see the wait that corresponds to this signal
2060 early_retire_seq = pQueue->seq + pQueue->submissions.size();
2061 }
2062 }
2063
2064 for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
2065 const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
2066 // Track objects tied to memory
2067 for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
2068 for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
2069 auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
locke-lunargcf04d582019-11-26 00:31:50 -07002070 SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
locke-lunargd556cc32019-09-17 01:21:23 -06002071 VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
2072 }
2073 }
2074 for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
2075 for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
2076 auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
locke-lunargcf04d582019-11-26 00:31:50 -07002077 SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
locke-lunargd556cc32019-09-17 01:21:23 -06002078 VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
2079 }
2080 }
2081 for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
2082 for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
2083 auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
 2084                    // TODO: This size is broken for non-opaque bindings; it assumes 4 bytes per texel and ignores the
 2085                    // image's actual format. Update to comprehend the full sparse binding data.
 2086                    VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
locke-lunargcf04d582019-11-26 00:31:50 -07002086 SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
locke-lunargd556cc32019-09-17 01:21:23 -06002087 VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
2088 }
2089 }
2090
2091 std::vector<SEMAPHORE_WAIT> semaphore_waits;
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002092 std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
locke-lunargd556cc32019-09-17 01:21:23 -06002093 std::vector<VkSemaphore> semaphore_externals;
2094 for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
2095 VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
2096 auto pSemaphore = GetSemaphoreState(semaphore);
2097 if (pSemaphore) {
2098 if (pSemaphore->scope == kSyncScopeInternal) {
2099 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
2100 semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
2101 pSemaphore->in_use.fetch_add(1);
2102 }
2103 pSemaphore->signaler.first = VK_NULL_HANDLE;
2104 pSemaphore->signaled = false;
2105 } else {
2106 semaphore_externals.push_back(semaphore);
2107 pSemaphore->in_use.fetch_add(1);
2108 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
2109 pSemaphore->scope = kSyncScopeInternal;
2110 }
2111 }
2112 }
2113 }
2114 for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
2115 VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
2116 auto pSemaphore = GetSemaphoreState(semaphore);
2117 if (pSemaphore) {
2118 if (pSemaphore->scope == kSyncScopeInternal) {
2119 pSemaphore->signaler.first = queue;
2120 pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
2121 pSemaphore->signaled = true;
2122 pSemaphore->in_use.fetch_add(1);
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002123
2124 SEMAPHORE_SIGNAL signal;
2125 signal.semaphore = semaphore;
2126 signal.seq = pSemaphore->signaler.second;
2127 semaphore_signals.push_back(signal);
locke-lunargd556cc32019-09-17 01:21:23 -06002128 } else {
 2129                    // Retire work up to this submit early; we will not see the wait that corresponds to this signal
2130 early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
2131 }
2132 }
2133 }
2134
2135 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002136 bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
locke-lunargd556cc32019-09-17 01:21:23 -06002137 }
2138
2139 if (early_retire_seq) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002140 RetireWorkOnQueue(pQueue, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06002141 }
2142}
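// Usage sketch (hypothetical handles; not part of the layer): a single opaque buffer
// bind of the kind tracked above. Binding VK_NULL_HANDLE memory instead would unbind
// the range.
//
//     VkSparseMemoryBind bind = {};
//     bind.resourceOffset = 0;
//     bind.size = 64 * 1024;
//     bind.memory = memory;
//     bind.memoryOffset = 0;
//     VkSparseBufferMemoryBindInfo buffer_bind = {sparse_buffer, 1, &bind};
//     VkBindSparseInfo bind_info = {};
//     bind_info.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
//     bind_info.bufferBindCount = 1;
//     bind_info.pBufferBinds = &buffer_bind;
//     vkQueueBindSparse(queue, 1, &bind_info, fence);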
2143
2144void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
2145 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
2146 VkResult result) {
2147 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002148 auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002149 semaphore_state->signaler.first = VK_NULL_HANDLE;
2150 semaphore_state->signaler.second = 0;
2151 semaphore_state->signaled = false;
2152 semaphore_state->scope = kSyncScopeInternal;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002153 semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
2154 semaphore_state->payload = 0;
2155 auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
2156 if (semaphore_type_create_info) {
2157 semaphore_state->type = semaphore_type_create_info->semaphoreType;
2158 semaphore_state->payload = semaphore_type_create_info->initialValue;
2159 }
locke-lunargd556cc32019-09-17 01:21:23 -06002160 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
2161}
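// Usage sketch (hypothetical handles): creating the timeline semaphore whose type and
// initial payload the pNext walk above picks up; without the chained struct the
// semaphore stays VK_SEMAPHORE_TYPE_BINARY_KHR with payload 0.
//
//     VkSemaphoreTypeCreateInfoKHR type_info = {};
//     type_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR;
//     type_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
//     type_info.initialValue = 0;
//     VkSemaphoreCreateInfo create_info = {};
//     create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
//     create_info.pNext = &type_info;
//     VkSemaphore semaphore = VK_NULL_HANDLE;
//     vkCreateSemaphore(device, &create_info, nullptr, &semaphore);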
2162
2163void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
2164 VkSemaphoreImportFlagsKHR flags) {
2165 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
2166 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
 2167        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || (flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR)) &&
2168 sema_node->scope == kSyncScopeInternal) {
2169 sema_node->scope = kSyncScopeExternalTemporary;
2170 } else {
2171 sema_node->scope = kSyncScopeExternalPermanent;
2172 }
2173 }
2174}
2175
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002176void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
 2177                                                              VkResult result) {
 2178    if (VK_SUCCESS != result) return;
 2179    auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
 2180    if (pSemaphore) pSemaphore->payload = pSignalInfo->value;
 2181}
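// Usage sketch (hypothetical handle): the host-side signal recorded above; the value
// must exceed the semaphore's current payload.
//
//     VkSemaphoreSignalInfoKHR signal_info = {};
//     signal_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR;
//     signal_info.semaphore = timeline_semaphore;
//     signal_info.value = 42;
//     vkSignalSemaphoreKHR(device, &signal_info);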
2181
locke-lunargd556cc32019-09-17 01:21:23 -06002182void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
2183 auto mem_info = GetDevMemState(mem);
2184 if (mem_info) {
2185 mem_info->mapped_range.offset = offset;
2186 mem_info->mapped_range.size = size;
2187 mem_info->p_driver_data = *ppData;
2188 }
2189}
2190
2191void ValidationStateTracker::RetireFence(VkFence fence) {
2192 auto pFence = GetFenceState(fence);
2193 if (pFence && pFence->scope == kSyncScopeInternal) {
2194 if (pFence->signaler.first != VK_NULL_HANDLE) {
 2195            // Fence signaler is a queue -- use this as proof that prior operations on that queue have completed.
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002196 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002197 } else {
 2198            // Fence signaler is the WSI. We're not yet tracking what the WSI op actually /was/ in core validation, but we
 2199            // still need to mark the fence as retired.
2200 pFence->state = FENCE_RETIRED;
2201 }
2202 }
2203}
2204
2205void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2206 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2207 if (VK_SUCCESS != result) return;
2208
2209 // When we know that all fences are complete we can clean/remove their CBs
2210 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2211 for (uint32_t i = 0; i < fenceCount; i++) {
2212 RetireFence(pFences[i]);
2213 }
2214 }
 2215    // NOTE : The alternate case, where only some of the fences have completed, is not handled here. In
 2216    // that case, for the app to determine which fences completed, it will have to call
 2217    // vkGetFenceStatus(), at which point we'll clean/remove their CBs if complete.
2218}
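// Usage sketch (hypothetical handles): only an all-fence wait (or a single-fence wait)
// proves that every listed fence completed, which is why the retire loop above is gated
// on waitAll or fenceCount == 1.
//
//     vkWaitForFences(device, fence_count, fences, VK_TRUE /*waitAll*/, UINT64_MAX);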
2219
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002220void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
2221 auto pSemaphore = GetSemaphoreState(semaphore);
2222 if (pSemaphore) {
2223 for (auto &pair : queueMap) {
2224 QUEUE_STATE &queueState = pair.second;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002225 uint64_t max_seq = 0;
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002226 for (const auto &submission : queueState.submissions) {
2227 for (const auto &signalSemaphore : submission.signalSemaphores) {
2228 if (signalSemaphore.semaphore == semaphore && signalSemaphore.payload <= until_payload) {
Tony-LunarG47d5e272020-04-07 15:35:55 -06002229 if (signalSemaphore.seq > max_seq) {
2230 max_seq = signalSemaphore.seq;
2231 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002232 }
2233 }
2234 }
Tony-LunarG47d5e272020-04-07 15:35:55 -06002235 if (max_seq) {
2236 RetireWorkOnQueue(&queueState, max_seq);
2237 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002238 }
2239 }
2240}
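// Worked example (hypothetical values): if a queue's pending submissions signal this
// semaphore with {payload 5, seq 3} and {payload 9, seq 7}, then
// RetireTimelineSemaphore(semaphore, 8) retires work through seq 3 only; a wait for
// payload 9 or greater would retire through seq 7 as well.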
2241
John Zulauff89de662020-04-13 18:57:34 -06002242void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2243 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002244 if (VK_SUCCESS != result) return;
2245
2246 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2247 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2248 }
2249}
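// Usage sketch (hypothetical handles): the host-side wait that drives the retirement
// above once it returns VK_SUCCESS.
//
//     uint64_t wait_value = 42;
//     VkSemaphoreWaitInfoKHR wait_info = {};
//     wait_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR;
//     wait_info.semaphoreCount = 1;
//     wait_info.pSemaphores = &timeline_semaphore;
//     wait_info.pValues = &wait_value;
//     vkWaitSemaphoresKHR(device, &wait_info, UINT64_MAX);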
2250
John Zulauff89de662020-04-13 18:57:34 -06002251void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2252 VkResult result) {
2253 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2254}
2255
2256void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
2257 uint64_t timeout, VkResult result) {
2258 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2259}
2260
locke-lunargd556cc32019-09-17 01:21:23 -06002261void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2262 if (VK_SUCCESS != result) return;
2263 RetireFence(fence);
2264}
2265
2266void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
2267 // Add queue to tracking set only if it is new
2268 auto queue_is_new = queues.emplace(queue);
 2269    if (queue_is_new.second) {
2270 QUEUE_STATE *queue_state = &queueMap[queue];
2271 queue_state->queue = queue;
2272 queue_state->queueFamilyIndex = queue_family_index;
2273 queue_state->seq = 0;
2274 }
2275}
2276
2277void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
2278 VkQueue *pQueue) {
2279 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
2280}
2281
2282void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
2283 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
2284}
2285
2286void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2287 if (VK_SUCCESS != result) return;
2288 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002289 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002290}
2291
2292void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2293 if (VK_SUCCESS != result) return;
2294 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002295 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002296 }
2297}
2298
2299void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2300 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002301 auto fence_state = GetFenceState(fence);
2302 fence_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002303 fenceMap.erase(fence);
2304}
2305
2306void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2307 const VkAllocationCallbacks *pAllocator) {
2308 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002309 auto semaphore_state = GetSemaphoreState(semaphore);
2310 semaphore_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002311 semaphoreMap.erase(semaphore);
2312}
2313
2314void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2315 if (!event) return;
2316 EVENT_STATE *event_state = GetEventState(event);
2317 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
2318 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
2319 eventMap.erase(event);
2320}
2321
2322void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2323 const VkAllocationCallbacks *pAllocator) {
2324 if (!queryPool) return;
2325 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
2326 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
2327 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002328 qp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002329 queryPoolMap.erase(queryPool);
2330}
2331
 2332 // Object with given handle is being bound to memory w/ given mem_info struct.
 2333 // Record the binding by adding the handle to the matching bound-object set on mem_info
 2334 // (bound_images, bound_buffers, or bound_acceleration_structures). The memoryOffset
 2335 // parameter is accepted for the callers' convenience but is not stored here.
locke-lunargd556cc32019-09-17 01:21:23 -06002336void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002337 VkDeviceSize memoryOffset) {
locke-lunargd556cc32019-09-17 01:21:23 -06002338 if (typed_handle.type == kVulkanObjectTypeImage) {
2339 mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
2340 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002341 mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002342 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002343 mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002344 } else {
2345 // Unsupported object type
2346 assert(false);
2347 }
2348}
2349
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002350void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
2351 InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002352}
2353
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002354void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
2355 InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002356}
2357
2358void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002359 VkDeviceSize mem_offset) {
2360 InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002361}
2362
 2363 // This function will remove the handle from the appropriate bound-object set in mem_info.
2364static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
2365 if (typed_handle.type == kVulkanObjectTypeImage) {
2366 mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
2367 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002368 mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002369 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002370 mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002371 } else {
2372 // Unsupported object type
2373 assert(false);
2374 }
2375}
2376
2377void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
2378 RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
2379}
2380
2381void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
2382 RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
2383}
2384
2385void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
2386 RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
2387}
2388
2389void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2390 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2391 if (buffer_state) {
2392 // Track bound memory range information
2393 auto mem_info = GetDevMemState(mem);
2394 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002395 InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06002396 }
2397 // Track objects tied to memory
2398 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
2399 }
2400}
2401
2402void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
2403 VkDeviceSize memoryOffset, VkResult result) {
2404 if (VK_SUCCESS != result) return;
2405 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
2406}
2407
2408void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
2409 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2410 for (uint32_t i = 0; i < bindInfoCount; i++) {
2411 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2412 }
2413}
2414
2415void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
2416 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2417 for (uint32_t i = 0; i < bindInfoCount; i++) {
2418 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2419 }
2420}
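// Usage sketch (hypothetical handles): one batched bind of the kind the two loops above
// record per element.
//
//     VkBindBufferMemoryInfoKHR bind_info = {};
//     bind_info.sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR;
//     bind_info.buffer = buffer;
//     bind_info.memory = memory;
//     bind_info.memoryOffset = 0;
//     vkBindBufferMemory2KHR(device, 1, &bind_info);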
2421
Spencer Fricke6c127102020-04-16 06:25:20 -07002422void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002423 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2424 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002425 buffer_state->memory_requirements_checked = true;
2426 }
2427}
2428
2429void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
2430 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002431 RecordGetBufferMemoryRequirementsState(buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002432}
2433
2434void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
2435 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
2436 VkMemoryRequirements2KHR *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002437 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002438}
2439
2440void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
2441 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
2442 VkMemoryRequirements2KHR *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002443 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002444}
2445
Spencer Fricke6c127102020-04-16 06:25:20 -07002446void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002447 const VkImagePlaneMemoryRequirementsInfo *plane_info =
2448 (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002449 IMAGE_STATE *image_state = GetImageState(image);
2450 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002451 if (plane_info != nullptr) {
2452 // Multi-plane image
 2453            image_state->memory_requirements_checked = false;  // Each image plane needs to be checked individually
2454 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
2455 image_state->plane0_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002456 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
2457 image_state->plane1_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002458 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
2459 image_state->plane2_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002460 }
2461 } else {
 2462            // Single-plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002463 image_state->memory_requirements_checked = true;
2464 }
locke-lunargd556cc32019-09-17 01:21:23 -06002465 }
2466}
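// Usage sketch (hypothetical handles): the per-plane query that sets the
// planeN_memory_requirements_checked flags above for a multi-planar image.
//
//     VkImagePlaneMemoryRequirementsInfo plane_info = {};
//     plane_info.sType = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO;
//     plane_info.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
//     VkImageMemoryRequirementsInfo2 info = {};
//     info.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
//     info.pNext = &plane_info;
//     info.image = multi_planar_image;
//     VkMemoryRequirements2 reqs = {};
//     reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
//     vkGetImageMemoryRequirements2(device, &info, &reqs);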
2467
2468void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
2469 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002470 RecordGetImageMemoryRequirementsState(image, nullptr);
locke-lunargd556cc32019-09-17 01:21:23 -06002471}
2472
2473void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2474 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002475 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002476}
2477
2478void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
2479 const VkImageMemoryRequirementsInfo2 *pInfo,
2480 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002481 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002482}
2483
2484static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2485 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2486 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2487 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2488 image_state->sparse_metadata_required = true;
2489 }
2490}
2491
2492void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2493 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2494 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2495 auto image_state = GetImageState(image);
2496 image_state->get_sparse_reqs_called = true;
2497 if (!pSparseMemoryRequirements) return;
2498 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2499 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2500 }
2501}
2502
2503void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
2504 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2505 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2506 auto image_state = GetImageState(pInfo->image);
2507 image_state->get_sparse_reqs_called = true;
2508 if (!pSparseMemoryRequirements) return;
2509 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2510 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2511 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2512 }
2513}
2514
2515void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
2516 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2517 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2518 auto image_state = GetImageState(pInfo->image);
2519 image_state->get_sparse_reqs_called = true;
2520 if (!pSparseMemoryRequirements) return;
2521 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2522 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2523 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2524 }
2525}
2526
2527void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2528 const VkAllocationCallbacks *pAllocator) {
2529 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002530 auto shader_module_state = GetShaderModuleState(shaderModule);
2531 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002532 shaderModuleMap.erase(shaderModule);
2533}
2534
2535void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2536 const VkAllocationCallbacks *pAllocator) {
2537 if (!pipeline) return;
2538 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
2539 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
2540 // Any bound cmd buffers are now invalid
2541 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002542 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002543 pipelineMap.erase(pipeline);
2544}
2545
2546void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2547 const VkAllocationCallbacks *pAllocator) {
2548 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002549 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
2550 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002551 pipelineLayoutMap.erase(pipelineLayout);
2552}
2553
2554void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2555 const VkAllocationCallbacks *pAllocator) {
2556 if (!sampler) return;
2557 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
2558 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
2559 // Any bound cmd buffers are now invalid
2560 if (sampler_state) {
2561 InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
Yuly Novikov424cdd52020-05-26 16:45:12 -04002562
2563 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2564 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
2565 custom_border_color_sampler_count--;
2566 }
2567
2568 sampler_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002569 }
2570 samplerMap.erase(sampler);
2571}
2572
2573void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2574 const VkAllocationCallbacks *pAllocator) {
2575 if (!descriptorSetLayout) return;
2576 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2577 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002578 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002579 descriptorSetLayoutMap.erase(layout_it);
2580 }
2581}
2582
2583void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2584 const VkAllocationCallbacks *pAllocator) {
2585 if (!descriptorPool) return;
2586 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
2587 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
2588 if (desc_pool_state) {
2589 // Any bound cmd buffers are now invalid
2590 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
2591 // Free sets that were in this pool
2592 for (auto ds : desc_pool_state->sets) {
2593 FreeDescriptorSet(ds);
2594 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002595 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002596 descriptorPoolMap.erase(descriptorPool);
2597 }
2598}
2599
2600// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
2601void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
2602 const VkCommandBuffer *command_buffers) {
2603 for (uint32_t i = 0; i < command_buffer_count; i++) {
John Zulaufd1f85d42020-04-15 12:23:15 -06002604 // Allow any derived class to clean up command buffer state
2605 if (command_buffer_free_callback) {
2606 (*command_buffer_free_callback)(command_buffers[i]);
2607 }
2608
locke-lunargd556cc32019-09-17 01:21:23 -06002609 auto cb_state = GetCBState(command_buffers[i]);
2610 // Remove references to command buffer's state and delete
2611 if (cb_state) {
2612 // reset prior to delete, removing various references to it.
2613 // TODO: fix this, it's insane.
2614 ResetCommandBufferState(cb_state->commandBuffer);
2615 // Remove the cb_state's references from COMMAND_POOL_STATEs
2616 pool_state->commandBuffers.erase(command_buffers[i]);
2617 // Remove the cb debug labels
2618 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
2619 // Remove CBState from CB map
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002620 cb_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002621 commandBufferMap.erase(cb_state->commandBuffer);
2622 }
2623 }
2624}
2625
2626void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2627 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
2628 auto pPool = GetCommandPoolState(commandPool);
2629 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
2630}
2631
2632void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2633 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2634 VkResult result) {
2635 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002636 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002637 cmd_pool_state->createFlags = pCreateInfo->flags;
2638 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07002639 cmd_pool_state->unprotected = ((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) == 0);
locke-lunargd556cc32019-09-17 01:21:23 -06002640 commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
2641}
2642
2643void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2644 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2645 VkResult result) {
2646 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002647 auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002648 query_pool_state->createInfo = *pCreateInfo;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002649 query_pool_state->pool = *pQueryPool;
2650 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
2651 const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
2652 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
2653
2654 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2655 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2656 switch (counter.scope) {
2657 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
2658 query_pool_state->has_perf_scope_command_buffer = true;
2659 break;
2660 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
2661 query_pool_state->has_perf_scope_render_pass = true;
2662 break;
2663 default:
2664 break;
2665 }
2666 }
2667
2668 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
2669 &query_pool_state->n_performance_passes);
2670 }
2671
locke-lunargd556cc32019-09-17 01:21:23 -06002672 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
2673
2674 QueryObject query_obj{*pQueryPool, 0u};
2675 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2676 query_obj.query = i;
2677 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2678 }
2679}
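// Usage sketch (hypothetical indices): the performance query pool creation the branch
// above inspects; counter scopes come from
// vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.
//
//     uint32_t counter_indices[] = {0, 1};
//     VkQueryPoolPerformanceCreateInfoKHR perf_info = {};
//     perf_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR;
//     perf_info.queueFamilyIndex = 0;
//     perf_info.counterIndexCount = 2;
//     perf_info.pCounterIndices = counter_indices;
//     VkQueryPoolCreateInfo create_info = {};
//     create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
//     create_info.pNext = &perf_info;
//     create_info.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
//     create_info.queryCount = 1;
//     VkQueryPool query_pool = VK_NULL_HANDLE;
//     vkCreateQueryPool(device, &create_info, nullptr, &query_pool);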
2680
2681void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2682 const VkAllocationCallbacks *pAllocator) {
2683 if (!commandPool) return;
2684 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2685 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2686 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2687 if (cp_state) {
2688 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2689 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2690 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002691 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002692 commandPoolMap.erase(commandPool);
2693 }
2694}
2695
2696void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2697 VkCommandPoolResetFlags flags, VkResult result) {
2698 if (VK_SUCCESS != result) return;
2699 // Reset all of the CBs allocated from this pool
2700 auto command_pool_state = GetCommandPoolState(commandPool);
2701 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2702 ResetCommandBufferState(cmdBuffer);
2703 }
2704}
2705
2706void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2707 VkResult result) {
2708 for (uint32_t i = 0; i < fenceCount; ++i) {
2709 auto pFence = GetFenceState(pFences[i]);
2710 if (pFence) {
2711 if (pFence->scope == kSyncScopeInternal) {
2712 pFence->state = FENCE_UNSIGNALED;
2713 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2714 pFence->scope = kSyncScopeInternal;
2715 }
2716 }
2717 }
2718}
2719
Jeff Bolzadbfa852019-10-04 13:53:30 -05002720// For given cb_nodes, invalidate them and track object causing invalidation.
2721// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
2722// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
2723// can also unlink objects from command buffers.
2724void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
2725 const VulkanTypedHandle &obj, bool unlink) {
2726 for (const auto &cb_node_pair : cb_nodes) {
2727 auto &cb_node = cb_node_pair.first;
2728 if (cb_node->state == CB_RECORDING) {
2729 cb_node->state = CB_INVALID_INCOMPLETE;
2730 } else if (cb_node->state == CB_RECORDED) {
2731 cb_node->state = CB_INVALID_COMPLETE;
2732 }
2733 cb_node->broken_bindings.push_back(obj);
2734
2735 // if secondary, then propagate the invalidation to the primaries that will call us.
2736 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
2737 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
2738 }
2739 if (unlink) {
2740 int index = cb_node_pair.second;
2741 assert(cb_node->object_bindings[index] == obj);
2742 cb_node->object_bindings[index] = VulkanTypedHandle();
2743 }
2744 }
2745 if (unlink) {
2746 cb_nodes.clear();
2747 }
2748}
2749
2750void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
2751 const VulkanTypedHandle &obj) {
locke-lunargd556cc32019-09-17 01:21:23 -06002752 for (auto cb_node : cb_nodes) {
2753 if (cb_node->state == CB_RECORDING) {
2754 cb_node->state = CB_INVALID_INCOMPLETE;
2755 } else if (cb_node->state == CB_RECORDED) {
2756 cb_node->state = CB_INVALID_COMPLETE;
2757 }
2758 cb_node->broken_bindings.push_back(obj);
2759
2760 // if secondary, then propagate the invalidation to the primaries that will call us.
2761 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05002762 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06002763 }
2764 }
2765}
2766
2767void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2768 const VkAllocationCallbacks *pAllocator) {
2769 if (!framebuffer) return;
2770 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2771 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2772 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002773 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002774 frameBufferMap.erase(framebuffer);
2775}
2776
2777void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2778 const VkAllocationCallbacks *pAllocator) {
2779 if (!renderPass) return;
2780 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2781 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2782 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002783 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002784 renderPassMap.erase(renderPass);
2785}
2786
2787void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2788 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2789 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002790 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002791 fence_state->fence = *pFence;
2792 fence_state->createInfo = *pCreateInfo;
2793 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2794 fenceMap[*pFence] = std::move(fence_state);
2795}
2796
2797bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2798 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2799 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002800 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002801 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2802 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2803 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2804 cgpl_state->pipe_state.reserve(count);
2805 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002806 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002807 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002808 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002809 }
2810 return false;
2811}
2812
2813void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2814 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2815 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2816 VkResult result, void *cgpl_state_data) {
2817 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2818 // This API may create pipelines regardless of the return value
2819 for (uint32_t i = 0; i < count; i++) {
2820 if (pPipelines[i] != VK_NULL_HANDLE) {
2821 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2822 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2823 }
2824 }
2825 cgpl_state->pipe_state.clear();
2826}
2827
2828bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2829 const VkComputePipelineCreateInfo *pCreateInfos,
2830 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002831 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002832 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2833 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2834 ccpl_state->pipe_state.reserve(count);
2835 for (uint32_t i = 0; i < count; i++) {
2836 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002837 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002838 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002839 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002840 }
2841 return false;
2842}
2843
2844void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2845 const VkComputePipelineCreateInfo *pCreateInfos,
2846 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2847 VkResult result, void *ccpl_state_data) {
2848 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2849
2850 // This API may create pipelines regardless of the return value
2851 for (uint32_t i = 0; i < count; i++) {
2852 if (pPipelines[i] != VK_NULL_HANDLE) {
2853 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2854 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2855 }
2856 }
2857 ccpl_state->pipe_state.clear();
2858}
2859
2860bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2861 uint32_t count,
2862 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2863 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002864 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002865 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2866 crtpl_state->pipe_state.reserve(count);
2867 for (uint32_t i = 0; i < count; i++) {
2868 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002869 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002870 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002871 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002872 }
2873 return false;
2874}
2875
2876void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2877 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2878 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2879 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2880 // This API may create pipelines regardless of the return value
2881 for (uint32_t i = 0; i < count; i++) {
2882 if (pPipelines[i] != VK_NULL_HANDLE) {
2883 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2884 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2885 }
2886 }
2887 crtpl_state->pipe_state.clear();
2888}
2889
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002890bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache,
2891 uint32_t count,
2892 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2893 const VkAllocationCallbacks *pAllocator,
2894 VkPipeline *pPipelines, void *crtpl_state_data) const {
2895 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2896 crtpl_state->pipe_state.reserve(count);
2897 for (uint32_t i = 0; i < count; i++) {
2898 // Create and initialize internal tracking data structure
2899 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
2900 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
2901 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
2902 }
2903 return false;
2904}
2905
2906void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
2907 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2908 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2909 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2910 // This API may create pipelines regardless of the return value
2911 for (uint32_t i = 0; i < count; i++) {
2912 if (pPipelines[i] != VK_NULL_HANDLE) {
2913 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2914 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2915 }
2916 }
2917 crtpl_state->pipe_state.clear();
2918}
2919
locke-lunargd556cc32019-09-17 01:21:23 -06002920void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2921 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2922 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002923 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Tony-LunarG7337b312020-04-15 16:40:25 -06002924 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT || pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT)
2925 custom_border_color_sampler_count++;
locke-lunargd556cc32019-09-17 01:21:23 -06002926}
2927
2928void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2929 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2930 const VkAllocationCallbacks *pAllocator,
2931 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2932 if (VK_SUCCESS != result) return;
2933 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2934}
2935
2936// For repeatable sorting, not very useful for "memory in range" search
2937struct PushConstantRangeCompare {
2938 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2939 if (lhs->offset == rhs->offset) {
2940 if (lhs->size == rhs->size) {
2941 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2942 return lhs->stageFlags < rhs->stageFlags;
2943 }
2944 // If the offsets are the same then sorting by the end of range is useful for validation
2945 return lhs->size < rhs->size;
2946 }
2947 return lhs->offset < rhs->offset;
2948 }
2949};
2950
2951static PushConstantRangesDict push_constant_ranges_dict;
2952
2953PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2954 if (!info->pPushConstantRanges) {
2955 // Hand back the empty entry (creating as needed)...
2956 return push_constant_ranges_dict.look_up(PushConstantRanges());
2957 }
2958
2959 // Sort the input ranges to ensure equivalent ranges map to the same id
2960 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2961 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2962 sorted.insert(info->pPushConstantRanges + i);
2963 }
2964
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07002965 PushConstantRanges ranges;
2966 ranges.reserve(sorted.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002967 for (const auto range : sorted) {
2968 ranges.emplace_back(*range);
2969 }
2970 return push_constant_ranges_dict.look_up(std::move(ranges));
2971}
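// Worked example (hypothetical ranges): two pipeline layouts created with
//     {VK_SHADER_STAGE_VERTEX_BIT, 0, 16} and {VK_SHADER_STAGE_FRAGMENT_BIT, 16, 8}
// listed in either order sort identically under PushConstantRangeCompare, so both
// layouts get the same PushConstantRangesId and push-constant compatibility reduces to
// an id comparison.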
2972
 2973 // Dictionary of canonical form of the pipeline layout's list of descriptor set layouts
2974static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2975
2976// Dictionary of canonical form of the "compatible for set" records
2977static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2978
2979static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2980 const PipelineLayoutSetLayoutsId set_layouts_id) {
2981 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2982}
2983
2984void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2985 const VkAllocationCallbacks *pAllocator,
2986 VkPipelineLayout *pPipelineLayout, VkResult result) {
2987 if (VK_SUCCESS != result) return;
2988
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002989 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002990 pipeline_layout_state->layout = *pPipelineLayout;
2991 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2992 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2993 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002994 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002995 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2996 }
2997
2998 // Get canonical form IDs for the "compatible for set" contents
2999 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
3000 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
3001 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
3002
 3003    // Create table of "compatible for set N" canonical forms for trivial accept validation
3004 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
3005 pipeline_layout_state->compat_for_set.emplace_back(
3006 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
3007 }
3008 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
3009}
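// Worked example (hypothetical layouts): if layout A was created with set layouts
// {S0, S1, S2} and layout B with {S0, S1, S3}, and both share a push-constant id, their
// compat_for_set entries match for sets 0 and 1. That mirrors the spec's rule that
// descriptor sets bound with A stay valid for B at those indices.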
3010
3011void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
3012 const VkAllocationCallbacks *pAllocator,
3013 VkDescriptorPool *pDescriptorPool, VkResult result) {
3014 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003015 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003016}
3017
3018void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3019 VkDescriptorPoolResetFlags flags, VkResult result) {
3020 if (VK_SUCCESS != result) return;
3021 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
3022 // TODO: validate flags
3023 // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
3024 for (auto ds : pPool->sets) {
3025 FreeDescriptorSet(ds);
3026 }
3027 pPool->sets.clear();
3028 // Reset available count for each type and available sets for this pool
3029 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
3030 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
3031 }
3032 pPool->availableSets = pPool->maxSets;
3033}
3034
3035bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
3036 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003037 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003038 // Always update common data
3039 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3040 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3041 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
3042
3043 return false;
3044}
3045
3046// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
3047void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3048 VkDescriptorSet *pDescriptorSets, VkResult result,
3049 void *ads_state_data) {
3050 if (VK_SUCCESS != result) return;
3051 // All the updates are contained in a single cvdescriptorset function
3052 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3053 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3054 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
3055}
3056
3057void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
3058 const VkDescriptorSet *pDescriptorSets) {
3059 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
3060 // Update available descriptor sets in pool
3061 pool_state->availableSets += count;
3062
3063 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
3064 for (uint32_t i = 0; i < count; ++i) {
3065 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
3066 auto descriptor_set = setMap[pDescriptorSets[i]].get();
3067 uint32_t type_index = 0, descriptor_count = 0;
3068 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
3069 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
3070 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
3071 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
3072 }
3073 FreeDescriptorSet(descriptor_set);
3074 pool_state->sets.erase(descriptor_set);
3075 }
3076 }
3077}
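// Accounting example (assumed counts, for illustration only): freeing two sets,
// one holding 3 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER descriptors and one holding a
// single VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER descriptor, results in
//
//     pool_state->availableSets += 2;
//     pool_state->availableDescriptorTypeCount[uniform_buffer_index] += 3;
//     pool_state->availableDescriptorTypeCount[combined_image_sampler_index] += 1;
//
// the exact inverse of the bookkeeping performed when the sets were allocated.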
3078
3079void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
3080 const VkWriteDescriptorSet *pDescriptorWrites,
3081 uint32_t descriptorCopyCount,
3082 const VkCopyDescriptorSet *pDescriptorCopies) {
3083 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
3084 pDescriptorCopies);
3085}
3086
3087void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
3088 VkCommandBuffer *pCommandBuffer, VkResult result) {
3089 if (VK_SUCCESS != result) return;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003090 auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
locke-lunargd556cc32019-09-17 01:21:23 -06003091 if (pPool) {
3092 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
3093 // Add command buffer to its commandPool map
3094 pPool->commandBuffers.insert(pCommandBuffer[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003095 auto pCB = std::make_shared<CMD_BUFFER_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003096 pCB->createInfo = *pCreateInfo;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003097 pCB->command_pool = pPool;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07003098 pCB->unprotected = pPool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06003099 // Add command buffer to map
3100 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
3101 ResetCommandBufferState(pCommandBuffer[i]);
3102 }
3103 }
3104}
3105
3106// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
3107void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003108 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003109 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07003110 // If this is an imageless framebuffer, skip binding its attachment views
3111 if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003112 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
3113 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003114 auto view_state = GetAttachmentImageViewState(cb_state, fb_state, attachment);
locke-lunargd556cc32019-09-17 01:21:23 -06003115 if (view_state) {
3116 AddCommandBufferBindingImageView(cb_state, view_state);
3117 }
3118 }
3119}
3120
3121void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
3122 const VkCommandBufferBeginInfo *pBeginInfo) {
3123 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3124 if (!cb_state) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003125 if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
3126 // Secondary Command Buffer
3127 const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
3128 if (pInfo) {
3129 if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
3130 assert(pInfo->renderPass);
3131 auto framebuffer = GetFramebufferState(pInfo->framebuffer);
3132 if (framebuffer) {
3133 // Connect this framebuffer and its children to this cmdBuffer
3134 AddFramebufferBinding(cb_state, framebuffer);
3135 }
3136 }
3137 }
3138 }
3139 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
3140 ResetCommandBufferState(commandBuffer);
3141 }
3142 // Set updated state here in case implicit reset occurs above
3143 cb_state->state = CB_RECORDING;
3144 cb_state->beginInfo = *pBeginInfo;
3145 if (cb_state->beginInfo.pInheritanceInfo) {
3146 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
3147 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
3148 // If this is a secondary command buffer that is inheriting state, update the items we should inherit.
3149 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
3150 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
3151 cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
3152 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
3153 cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
3154 cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
3155 }
3156 }
3157
3158 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
3159 if (chained_device_group_struct) {
3160 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
3161 } else {
3162 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
3163 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003164
3165 cb_state->performance_lock_acquired = performance_lock_acquired;
locke-lunargd556cc32019-09-17 01:21:23 -06003166}
3167
3168void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
3169 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3170 if (!cb_state) return;
3171 // Cached validation applies only to a single recording of a given command buffer.
3172 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
3173 descriptor_set->ClearCachedValidation(cb_state);
3174 }
3175 cb_state->validated_descriptor_sets.clear();
3176 if (VK_SUCCESS == result) {
3177 cb_state->state = CB_RECORDED;
3178 }
3179}
3180
3181void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
3182 VkResult result) {
3183 if (VK_SUCCESS == result) {
3184 ResetCommandBufferState(commandBuffer);
3185 }
3186}
3187
3188CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
3189 // initially assume everything is static state
3190 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
3191
3192 if (ds) {
3193 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
3194 switch (ds->pDynamicStates[i]) {
3195 case VK_DYNAMIC_STATE_LINE_WIDTH:
3196 flags &= ~CBSTATUS_LINE_WIDTH_SET;
3197 break;
3198 case VK_DYNAMIC_STATE_DEPTH_BIAS:
3199 flags &= ~CBSTATUS_DEPTH_BIAS_SET;
3200 break;
3201 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
3202 flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
3203 break;
3204 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
3205 flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
3206 break;
3207 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
3208 flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
3209 break;
3210 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
3211 flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
3212 break;
3213 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
3214 flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
3215 break;
3216 case VK_DYNAMIC_STATE_SCISSOR:
3217 flags &= ~CBSTATUS_SCISSOR_SET;
3218 break;
3219 case VK_DYNAMIC_STATE_VIEWPORT:
3220 flags &= ~CBSTATUS_VIEWPORT_SET;
3221 break;
3222 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
3223 flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
3224 break;
3225 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
3226 flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
3227 break;
3228 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
3229 flags &= ~CBSTATUS_LINE_STIPPLE_SET;
3230 break;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003231 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
3232 flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
3233 break;
locke-lunargd556cc32019-09-17 01:21:23 -06003234 default:
3235 break;
3236 }
3237 }
3238 }
3239
3240 return flags;
3241}
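// Usage sketch (assumed pipeline, for illustration only): for a pipeline whose
// dynamic state lists only VK_DYNAMIC_STATE_VIEWPORT and VK_DYNAMIC_STATE_SCISSOR,
//
//     CBStatusFlags static_status = MakeStaticStateMask(ds);
//     // static_status == CBSTATUS_ALL_STATE_SET & ~(CBSTATUS_VIEWPORT_SET | CBSTATUS_SCISSOR_SET)
//
// PreCallRecordCmdBindPipeline() below ORs static_status into cb_state->status, so
// only the viewport and scissor bits remain to be supplied by vkCmdSetViewport and
// vkCmdSetScissor before drawing.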
3242
3243// Validation cache:
3244// CV is the bottommost implementor of this extension. Don't pass calls down.
3245 // Utility function to set collective state for a pipeline
3246void SetPipelineState(PIPELINE_STATE *pPipe) {
3247 // If any attachment used by this pipeline has blendEnable and uses constant blend factors, note that blend constants are in use
3248 if (pPipe->graphicsPipelineCI.pColorBlendState) {
3249 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
3250 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
3251 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3252 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3253 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3254 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3255 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3256 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3257 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3258 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
3259 pPipe->blendConstantsEnabled = true;
3260 }
3261 }
3262 }
3263 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07003264 // Check if sample locations are enabled
3265 if (pPipe->graphicsPipelineCI.pMultisampleState) {
3266 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
3267 lvl_find_in_chain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
3268 if (sample_location_state != nullptr) {
3269 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
3270 }
3271 }
locke-lunargd556cc32019-09-17 01:21:23 -06003272}
3273
3274void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
3275 VkPipeline pipeline) {
3276 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3277 assert(cb_state);
3278
3279 auto pipe_state = GetPipelineState(pipeline);
3280 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
3281 cb_state->status &= ~cb_state->static_status;
3282 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
3283 cb_state->status |= cb_state->static_status;
3284 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003285 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003286 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
3287 SetPipelineState(pipe_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003288 AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003289}
3290
3291void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3292 uint32_t viewportCount, const VkViewport *pViewports) {
3293 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3294 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
3295 cb_state->status |= CBSTATUS_VIEWPORT_SET;
3296}
3297
3298void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3299 uint32_t exclusiveScissorCount,
3300 const VkRect2D *pExclusiveScissors) {
3301 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3302 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3303 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3304 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
3305}
3306
3307void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3308 VkImageLayout imageLayout) {
3309 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3310
3311 if (imageView != VK_NULL_HANDLE) {
3312 auto view_state = GetImageViewState(imageView);
3313 AddCommandBufferBindingImageView(cb_state, view_state);
3314 }
3315}
3316
3317void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3318 uint32_t viewportCount,
3319 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3320 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3321 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3322 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3323 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
3324}
3325
3326void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3327 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3328 const VkAllocationCallbacks *pAllocator,
3329 VkAccelerationStructureNV *pAccelerationStructure,
3330 VkResult result) {
3331 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003332 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003333
3334 // Query the requirements now, in case the application doesn't, to avoid a query at bind/validation time
3335 VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
3336 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3337 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
3338 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3339 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3340
3341 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
3342 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3343 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
3344 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3345 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3346 &as_state->build_scratch_memory_requirements);
3347
3348 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
3349 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3350 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
3351 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3352 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3353 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003354 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003355 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3356}
3357
Jeff Bolz95176d02020-04-01 00:36:16 -05003358void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3359 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3360 const VkAllocationCallbacks *pAllocator,
3361 VkAccelerationStructureKHR *pAccelerationStructure,
3362 VkResult result) {
3363 if (VK_SUCCESS != result) return;
3364 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
3365
3366 // Query the requirements now, in case the application doesn't, to avoid a query at bind/validation time
3367 VkAccelerationStructureMemoryRequirementsInfoKHR as_memory_requirements_info = {};
3368 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3369 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR;
3370 as_memory_requirements_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3371 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3372 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &as_memory_requirements_info, &as_state->memory_requirements);
3373
3374 VkAccelerationStructureMemoryRequirementsInfoKHR scratch_memory_req_info = {};
3375 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3376 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR;
3377 scratch_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3378 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3379 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &scratch_memory_req_info,
3380 &as_state->build_scratch_memory_requirements);
3381
3382 VkAccelerationStructureMemoryRequirementsInfoKHR update_memory_req_info = {};
3383 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3384 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR;
3385 update_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3386 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3387 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &update_memory_req_info,
3388 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003389 as_state->allocator = pAllocator;
Jeff Bolz95176d02020-04-01 00:36:16 -05003390 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3391}
3392
locke-lunargd556cc32019-09-17 01:21:23 -06003393void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
3394 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
3395 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
3396 if (as_state != nullptr) {
3397 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3398 as_state->memory_requirements = *pMemoryRequirements;
3399 as_state->memory_requirements_checked = true;
3400 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3401 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3402 as_state->build_scratch_memory_requirements_checked = true;
3403 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3404 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3405 as_state->update_scratch_memory_requirements_checked = true;
3406 }
3407 }
3408}
3409
Jeff Bolz95176d02020-04-01 00:36:16 -05003410void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryCommon(
3411 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result,
3412 bool isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003413 if (VK_SUCCESS != result) return;
3414 for (uint32_t i = 0; i < bindInfoCount; i++) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003415 const VkBindAccelerationStructureMemoryInfoKHR &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003416
3417 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
3418 if (as_state) {
3419 // Track bound memory range information
3420 auto mem_info = GetDevMemState(info.memory);
3421 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07003422 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06003423 }
3424 // Track objects tied to memory
3425 SetMemBinding(info.memory, as_state, info.memoryOffset,
Jeff Bolz95176d02020-04-01 00:36:16 -05003426 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR));
locke-lunargd556cc32019-09-17 01:21:23 -06003427
3428 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003429 // XXX TODO: Query device address for KHR extension
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003430 if (enabled[gpu_validation] && isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003431 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3432 }
3433 }
3434 }
3435}
3436
Jeff Bolz95176d02020-04-01 00:36:16 -05003437void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3438 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
3439 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, true);
3440}
3441
3442void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryKHR(
3443 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result) {
3444 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, false);
3445}
3446
locke-lunargd556cc32019-09-17 01:21:23 -06003447void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3448 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3449 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3450 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3451 if (cb_state == nullptr) {
3452 return;
3453 }
3454
3455 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3456 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3457 if (dst_as_state != nullptr) {
3458 dst_as_state->built = true;
3459 dst_as_state->build_info.initialize(pInfo);
3460 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3461 }
3462 if (src_as_state != nullptr) {
3463 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3464 }
3465 cb_state->hasBuildAccelerationStructureCmd = true;
3466}
3467
3468void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3469 VkAccelerationStructureNV dst,
3470 VkAccelerationStructureNV src,
3471 VkCopyAccelerationStructureModeNV mode) {
3472 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3473 if (cb_state) {
3474 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3475 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3476 if (dst_as_state != nullptr && src_as_state != nullptr) {
3477 dst_as_state->built = true;
3478 dst_as_state->build_info = src_as_state->build_info;
3479 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3480 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3481 }
3482 }
3483}
3484
Jeff Bolz95176d02020-04-01 00:36:16 -05003485void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3486 VkAccelerationStructureKHR accelerationStructure,
3487 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003488 if (!accelerationStructure) return;
3489 auto *as_state = GetAccelerationStructureState(accelerationStructure);
3490 if (as_state) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003491 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06003492 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
3493 for (auto mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07003494 RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -06003495 }
3496 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003497 as_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003498 accelerationStructureMap.erase(accelerationStructure);
3499 }
3500}
3501
Jeff Bolz95176d02020-04-01 00:36:16 -05003502void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3503 VkAccelerationStructureNV accelerationStructure,
3504 const VkAllocationCallbacks *pAllocator) {
3505 PreCallRecordDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator);
3506}
3507
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003508void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3509 uint32_t viewportCount,
3510 const VkViewportWScalingNV *pViewportWScalings) {
3511 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3512 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
3513}
3514
locke-lunargd556cc32019-09-17 01:21:23 -06003515void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3516 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3517 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
3518}
3519
3520void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3521 uint16_t lineStipplePattern) {
3522 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3523 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
3524}
3525
3526void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3527 float depthBiasClamp, float depthBiasSlopeFactor) {
3528 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3529 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
3530}
3531
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003532void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3533 const VkRect2D *pScissors) {
3534 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3535 cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
3536 cb_state->status |= CBSTATUS_SCISSOR_SET;
3537}
3538
locke-lunargd556cc32019-09-17 01:21:23 -06003539void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3540 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3541 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
3542}
3543
3544void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3545 float maxDepthBounds) {
3546 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3547 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
3548}
3549
3550void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3551 uint32_t compareMask) {
3552 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3553 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
3554}
3555
3556void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3557 uint32_t writeMask) {
3558 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3559 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
3560}
3561
3562void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3563 uint32_t reference) {
3564 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3565 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
3566}
3567
3568// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
3569 // Exactly one of pDescriptorSets or push_descriptor_set must be nullptr, indicating whether this
3570// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
3571void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
3572 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
3573 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
3574 cvdescriptorset::DescriptorSet *push_descriptor_set,
3575 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
3576 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
3577 // Defensive
3578 assert(pipeline_layout);
3579 if (!pipeline_layout) return;
3580
3581 uint32_t required_size = first_set + set_count;
3582 const uint32_t last_binding_index = required_size - 1;
3583 assert(last_binding_index < pipeline_layout->compat_for_set.size());
3584
3585 // Some useful shorthand
3586 auto &last_bound = cb_state->lastBound[pipeline_bind_point];
3587 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
3588 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
3589
3590 // We need this three times in this function, but nowhere else
3591 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
3592 if (ds && ds->IsPushDescriptor()) {
3593 assert(ds == last_bound.push_descriptor_set.get());
3594 last_bound.push_descriptor_set = nullptr;
3595 return true;
3596 }
3597 return false;
3598 };
3599
3600 // Clean up the "disturbed" sets before and after the range to be set
3601 if (required_size < current_size) {
3602 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
3603 // We're disturbing the sets after the last one; we'll shrink below, but first need to check for and clean up the push_descriptor
3604 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
3605 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
3606 }
3607 } else {
3608 // We're not disturbing past last, so leave the upper binding data alone.
3609 required_size = current_size;
3610 }
3611 }
3612
3613 // We resize if we need more set entries or if those past "last" are disturbed
3614 if (required_size != current_size) {
3615 last_bound.per_set.resize(required_size);
3616 }
3617
3618 // For any previously bound sets, mark them "invalid" if they were disturbed by this update
3619 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
3620 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
3621 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3622 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
3623 last_bound.per_set[set_idx].dynamicOffsets.clear();
3624 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
3625 }
3626 }
3627
3628 // Now update the bound sets with the input sets
3629 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
3630 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
3631 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
3632 cvdescriptorset::DescriptorSet *descriptor_set =
3633 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
3634
3635 // Record binding (or push)
3636 if (descriptor_set != last_bound.push_descriptor_set.get()) {
3637 // Only cleanup the push descriptors if they aren't the currently used set.
3638 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3639 }
3640 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
3641 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
3642
3643 if (descriptor_set) {
3644 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
3645 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
3646 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
3647 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
3648 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
3649 input_dynamic_offsets = end_offset;
3650 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
3651 } else {
3652 last_bound.per_set[set_idx].dynamicOffsets.clear();
3653 }
3654 if (!descriptor_set->IsPushDescriptor()) {
3655 // Can't cache validation of push_descriptors
3656 cb_state->validated_descriptor_sets.insert(descriptor_set);
3657 }
3658 }
3659 }
3660}
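// Worked example (assumed bindings, for illustration only): suppose sets 0..3 are
// bound, and a new bind arrives with first_set == 1 and set_count == 1 using a
// layout whose compat id differs at index 1. Then:
//   - set 0 keeps its binding only if compat_for_set[0] still matches,
//   - set 1 is rebound to the incoming descriptor set, and
//   - sets 2..3 are "disturbed": any push descriptor among them is cleaned up and
//     per_set shrinks to required_size == 2.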
3661
3662// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3663void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3664 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3665 uint32_t firstSet, uint32_t setCount,
3666 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3667 const uint32_t *pDynamicOffsets) {
3668 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3669 auto pipeline_layout = GetPipelineLayout(layout);
3670
3671 // Resize binding arrays
3672 uint32_t last_set_index = firstSet + setCount - 1;
3673 if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
3674 cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
3675 }
3676
3677 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3678 dynamicOffsetCount, pDynamicOffsets);
3679 cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
3680 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3681}
3682
3683void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
3684 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
3685 const VkWriteDescriptorSet *pDescriptorWrites) {
3686 const auto &pipeline_layout = GetPipelineLayout(layout);
3687 // Short circuit invalid updates
3688 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
3689 !pipeline_layout->set_layouts[set]->IsPushDescriptor())
3690 return;
3691
3692 // We need a descriptor set to update the bindings with, compatible with the passed layout
3693 const auto dsl = pipeline_layout->set_layouts[set];
3694 auto &last_bound = cb_state->lastBound[pipelineBindPoint];
3695 auto &push_descriptor_set = last_bound.push_descriptor_set;
3696 // If we are disturbing the current push_descriptor_set, clear it
3697 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
John Zulaufd2c3dae2019-12-12 11:02:17 -07003698 last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
locke-lunargd556cc32019-09-17 01:21:23 -06003699 }
3700
3701 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
3702 nullptr);
3703 last_bound.pipeline_layout = layout;
3704
3705 // Now that we have either the new or the existing push_descriptor set, perform the write updates against it
Jeff Bolz41a1ced2019-10-11 11:40:49 -05003706 push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
locke-lunargd556cc32019-09-17 01:21:23 -06003707}
3708
3709void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3710 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3711 uint32_t set, uint32_t descriptorWriteCount,
3712 const VkWriteDescriptorSet *pDescriptorWrites) {
3713 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3714 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3715}
3716
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003717void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3718 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3719 const void *pValues) {
3720 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3721 if (cb_state != nullptr) {
3722 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3723
3724 auto &push_constant_data = cb_state->push_constant_data;
3725 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3726 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
3727 }
3728}
3729
locke-lunargd556cc32019-09-17 01:21:23 -06003730void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3731 VkIndexType indexType) {
3732 auto buffer_state = GetBufferState(buffer);
3733 auto cb_state = GetCBState(commandBuffer);
3734
3735 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
3736 cb_state->index_buffer_binding.buffer = buffer;
3737 cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
3738 cb_state->index_buffer_binding.offset = offset;
3739 cb_state->index_buffer_binding.index_type = indexType;
3740 // Add binding for this index buffer to this command buffer
3741 AddCommandBufferBindingBuffer(cb_state, buffer_state);
3742}
3743
3744void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3745 uint32_t bindingCount, const VkBuffer *pBuffers,
3746 const VkDeviceSize *pOffsets) {
3747 auto cb_state = GetCBState(commandBuffer);
3748
3749 uint32_t end = firstBinding + bindingCount;
3750 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3751 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3752 }
3753
3754 for (uint32_t i = 0; i < bindingCount; ++i) {
3755 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
3756 vertex_buffer_binding.buffer = pBuffers[i];
3757 vertex_buffer_binding.offset = pOffsets[i];
3758 // Add binding for this vertex buffer to this command buffer
Jeff Bolz165818a2020-05-08 11:19:03 -05003759 if (pBuffers[i]) {
3760 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
3761 }
locke-lunargd556cc32019-09-17 01:21:23 -06003762 }
3763}
3764
3765void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3766 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
3767 auto cb_state = GetCBState(commandBuffer);
3768 auto dst_buffer_state = GetBufferState(dstBuffer);
3769
3770 // Update bindings between buffer and cmd buffer
3771 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
3772}
3773
Jeff Bolz310775c2019-10-09 00:46:33 -05003774bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
3775 EventToStageMap *localEventToStageMap) {
3776 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06003777 return false;
3778}
3779
3780void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3781 VkPipelineStageFlags stageMask) {
3782 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3783 auto event_state = GetEventState(event);
3784 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003785 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003786 }
3787 cb_state->events.push_back(event);
3788 if (!cb_state->waitedEvents.count(event)) {
3789 cb_state->writeEventsBeforeWait.push_back(event);
3790 }
Jeff Bolz310775c2019-10-09 00:46:33 -05003791 cb_state->eventUpdates.emplace_back(
3792 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
3793 return SetEventStageMask(event, stageMask, localEventToStageMap);
3794 });
locke-lunargd556cc32019-09-17 01:21:23 -06003795}
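// Deferred-update sketch (assumed call site, not in this section): the lambdas
// queued on cb_state->eventUpdates are not evaluated at record time; at submit
// time they are replayed against a per-submission map, roughly:
//
//     EventToStageMap local_event_map;
//     for (const auto &update : cb_state->eventUpdates) {
//         skip |= update(this, /*do_validate=*/true, &local_event_map);
//     }
//
// which lets one recorded command buffer be evaluated independently per submission.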
3796
3797void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3798 VkPipelineStageFlags stageMask) {
3799 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3800 auto event_state = GetEventState(event);
3801 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003802 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003803 }
3804 cb_state->events.push_back(event);
3805 if (!cb_state->waitedEvents.count(event)) {
3806 cb_state->writeEventsBeforeWait.push_back(event);
3807 }
3808
3809 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003810 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
3811 return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
3812 });
locke-lunargd556cc32019-09-17 01:21:23 -06003813}
3814
3815void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
3816 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
3817 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3818 uint32_t bufferMemoryBarrierCount,
3819 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3820 uint32_t imageMemoryBarrierCount,
3821 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3822 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3823 for (uint32_t i = 0; i < eventCount; ++i) {
3824 auto event_state = GetEventState(pEvents[i]);
3825 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003826 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
3827 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003828 }
3829 cb_state->waitedEvents.insert(pEvents[i]);
3830 cb_state->events.push_back(pEvents[i]);
3831 }
3832}
3833
Jeff Bolz310775c2019-10-09 00:46:33 -05003834bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
3835 (*localQueryToStateMap)[object] = value;
3836 return false;
3837}
3838
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003839bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
3840 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05003841 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003842 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05003843 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06003844 }
3845 return false;
3846}
3847
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003848QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
3849 uint32_t perfPass) const {
3850 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06003851
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003852 auto iter = localQueryToStateMap->find(query);
3853 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05003854
Jeff Bolz310775c2019-10-09 00:46:33 -05003855 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06003856}
3857
3858void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003859 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003860 cb_state->activeQueries.insert(query_obj);
3861 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003862 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3863 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3864 QueryMap *localQueryToStateMap) {
3865 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
3866 return false;
3867 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05003868 auto pool_state = GetQueryPoolState(query_obj.pool);
3869 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
3870 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003871}
3872
3873void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3874 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003875 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003876 QueryObject query = {queryPool, slot};
3877 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3878 RecordCmdBeginQuery(cb_state, query);
3879}
3880
3881void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003882 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003883 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003884 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3885 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3886 QueryMap *localQueryToStateMap) {
3887 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3888 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05003889 auto pool_state = GetQueryPoolState(query_obj.pool);
3890 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
3891 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003892}
3893
3894void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003895 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003896 QueryObject query_obj = {queryPool, slot};
3897 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3898 RecordCmdEndQuery(cb_state, query_obj);
3899}
3900
3901void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3902 uint32_t firstQuery, uint32_t queryCount) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003903 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003904 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3905
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02003906 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
3907 QueryObject query = {queryPool, slot};
3908 cb_state->resetQueries.insert(query);
3909 }
3910
Jeff Bolz310775c2019-10-09 00:46:33 -05003911 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003912 bool do_validate, VkQueryPool &firstPerfQueryPool,
3913 uint32_t perfQueryPass,
3914 QueryMap *localQueryToStateMap) {
3915 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06003916 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05003917 auto pool_state = GetQueryPoolState(queryPool);
3918 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003919 cb_state);
3920}
3921
3922void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3923 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3924 VkDeviceSize dstOffset, VkDeviceSize stride,
3925 VkQueryResultFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003926 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003927 auto cb_state = GetCBState(commandBuffer);
3928 auto dst_buff_state = GetBufferState(dstBuffer);
3929 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003930 auto pool_state = GetQueryPoolState(queryPool);
3931 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003932 cb_state);
3933}
3934
3935void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3936 VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003937 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003938 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003939 auto pool_state = GetQueryPoolState(queryPool);
3940 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003941 cb_state);
3942 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003943 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
3944 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3945 QueryMap *localQueryToStateMap) {
3946 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3947 });
locke-lunargd556cc32019-09-17 01:21:23 -06003948}
3949
3950void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3951 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3952 VkResult result) {
3953 if (VK_SUCCESS != result) return;
3954 // Shadow create info and store in map
Jeff Bolz6ae39612019-10-11 20:57:36 -05003955 auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));
locke-lunargd556cc32019-09-17 01:21:23 -06003956
3957 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
3958 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
3959 VkImageView view = pCreateInfo->pAttachments[i];
3960 auto view_state = GetImageViewState(view);
3961 if (!view_state) {
3962 continue;
3963 }
3964 }
3965 }
3966 frameBufferMap[*pFramebuffer] = std::move(fb_state);
3967}
3968
3969void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3970 RENDER_PASS_STATE *render_pass) {
3971 auto &subpass_to_node = render_pass->subpassToNode;
3972 subpass_to_node.resize(pCreateInfo->subpassCount);
3973 auto &self_dependencies = render_pass->self_dependencies;
3974 self_dependencies.resize(pCreateInfo->subpassCount);
John Zulauf4aff5d92020-02-21 08:29:35 -07003975 auto &subpass_dependencies = render_pass->subpass_dependencies;
3976 subpass_dependencies.resize(pCreateInfo->subpassCount);
locke-lunargd556cc32019-09-17 01:21:23 -06003977
3978 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
3979 subpass_to_node[i].pass = i;
3980 self_dependencies[i].clear();
John Zulauf4aff5d92020-02-21 08:29:35 -07003981 subpass_dependencies[i].pass = i;
locke-lunargd556cc32019-09-17 01:21:23 -06003982 }
3983 for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
3984 const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
John Zulauf4aff5d92020-02-21 08:29:35 -07003985 const auto srcSubpass = dependency.srcSubpass;
3986 const auto dstSubpass = dependency.dstSubpass;
locke-lunargd556cc32019-09-17 01:21:23 -06003987 if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
3988 if (dependency.srcSubpass == dependency.dstSubpass) {
3989 self_dependencies[dependency.srcSubpass].push_back(i);
3990 } else {
3991 subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
3992 subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
3993 }
3994 }
John Zulauf4aff5d92020-02-21 08:29:35 -07003995 if (srcSubpass == VK_SUBPASS_EXTERNAL) {
3996 assert(dstSubpass != VK_SUBPASS_EXTERNAL); // this is invalid per VUID-VkSubpassDependency-srcSubpass-00865
3997 subpass_dependencies[dstSubpass].barrier_from_external = &dependency;
3998 } else if (dstSubpass == VK_SUBPASS_EXTERNAL) {
3999 subpass_dependencies[srcSubpass].barrier_to_external = &dependency;
4000 } else if (dependency.srcSubpass != dependency.dstSubpass) {
4001 // ignore self dependencies in prev and next
4002 subpass_dependencies[srcSubpass].next.emplace_back(&dependency, &subpass_dependencies[dstSubpass]);
4003 subpass_dependencies[dstSubpass].prev.emplace_back(&dependency, &subpass_dependencies[srcSubpass]);
4004 }
4005 }
4006
4007 //
4008 // Determine "asynchronous" subpasses
4009 // Synchronization is only interested in asynchronous subpasses *earlier* than the current one, so we only look towards those.
4010 // NOTE: This is O(N^3), which we could shrink to O(N^2 log N) using sets instead of arrays, but given that N is likely to be
4011 // small and the constant for |= from the prev is much less than for set operations, we'll accept the brute force.
4012 std::vector<std::vector<bool>> pass_depends(pCreateInfo->subpassCount);
4013 for (uint32_t i = 1; i < pCreateInfo->subpassCount; ++i) {
4014 auto &depends = pass_depends[i];
4015 depends.resize(i);
4016 auto &subpass_dep = subpass_dependencies[i];
4017 for (const auto &prev : subpass_dep.prev) {
4018 const auto prev_pass = prev.node->pass;
4019 const auto &prev_depends = pass_depends[prev_pass];
4020 for (uint32_t j = 0; j < prev_pass; j++) {
4021 depends[j] = depends[j] | prev_depends[j];
4022 }
4023 depends[prev_pass] = true;
4024 }
4025 for (uint32_t pass = 0; pass < subpass_dep.pass; pass++) {
4026 if (!depends[pass]) {
4027 subpass_dep.async.push_back(pass);
4028 }
4029 }
locke-lunargd556cc32019-09-17 01:21:23 -06004030 }
4031}
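// Worked example (assumed render pass, for illustration only): with three
// subpasses and a single dependency 0 -> 2, the transitive-closure loop above
// computes
//
//     pass_depends[1] == {false}        => subpass_dependencies[1].async == {0}
//     pass_depends[2] == {true, false}  => subpass_dependencies[2].async == {1}
//
// so subpass 1 may run concurrently with subpass 0, and subpass 2 concurrently
// with subpass 1, because no dependency chain orders those pairs.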
4032
locke-lunargd556cc32019-09-17 01:21:23 -06004033
John Zulauf4aff5d92020-02-21 08:29:35 -07004034static VkSubpassDependency2 ImplicitDependencyFromExternal(uint32_t subpass) {
4035 VkSubpassDependency2 from_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4036 nullptr,
4037 VK_SUBPASS_EXTERNAL,
4038 subpass,
4039 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
4040 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4041 0,
4042 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4043 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4044 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4045 0,
4046 0};
4047 return from_external;
4048}
4049
4050static VkSubpassDependency2 ImplicitDependencyToExternal(uint32_t subpass) {
4051 VkSubpassDependency2 to_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4052 nullptr,
4053 subpass,
4054 VK_SUBPASS_EXTERNAL,
4055 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4056 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
4057 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4058 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4059 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4060 0,
4061 0,
4062 0};
4063 return to_external;
4064}
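// Usage sketch (assumed caller, mirroring the spec's implicit-dependency rules):
// when a subpass records no explicit VK_SUBPASS_EXTERNAL dependency, a consumer of
// this state can substitute the implicit one, e.g.:
//
//     VkSubpassDependency2 implicit = ImplicitDependencyFromExternal(subpass);
//
// matching the defaults the Vulkan spec applies when an application omits explicit
// external subpass dependencies.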
4065
locke-lunargd556cc32019-09-17 01:21:23 -06004066void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
4067 std::shared_ptr<RENDER_PASS_STATE> &render_pass,
4068 VkRenderPass *pRenderPass) {
4069 render_pass->renderPass = *pRenderPass;
4070 auto create_info = render_pass->createInfo.ptr();
4071
4072 RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());
4073
    struct AttachmentTracker {  // This is really only of local interest, but a bit big for a lambda
        RENDER_PASS_STATE *const rp;
        std::vector<uint32_t> &first;
        std::vector<uint32_t> &last;
        std::vector<std::vector<RENDER_PASS_STATE::AttachmentTransition>> &subpass_transitions;
        std::unordered_map<uint32_t, bool> &first_read;
        const uint32_t attachment_count;
        std::vector<VkImageLayout> attachment_layout;
        AttachmentTracker(std::shared_ptr<RENDER_PASS_STATE> &render_pass)
            : rp(render_pass.get()),
              first(rp->attachment_first_subpass),
              last(rp->attachment_last_subpass),
              subpass_transitions(rp->subpass_transitions),
              first_read(rp->attachment_first_read),
              attachment_count(rp->createInfo.attachmentCount),
              attachment_layout() {
            first.resize(attachment_count, VK_SUBPASS_EXTERNAL);
            last.resize(attachment_count, VK_SUBPASS_EXTERNAL);
            subpass_transitions.resize(rp->createInfo.subpassCount + 1);  // Add an extra for EndRenderPass
            attachment_layout.reserve(attachment_count);
            for (uint32_t j = 0; j < attachment_count; j++) {
                attachment_layout.push_back(rp->createInfo.pAttachments[j].initialLayout);
            }
        }

        void Update(uint32_t subpass, const VkAttachmentReference2 *attach_ref, uint32_t count, bool is_read) {
            if (nullptr == attach_ref) return;
            for (uint32_t j = 0; j < count; ++j) {
                const auto attachment = attach_ref[j].attachment;
                if (attachment != VK_ATTACHMENT_UNUSED) {
                    const auto layout = attach_ref[j].layout;
                    // Take advantage of the fact that insert won't overwrite, so we'll only write the first time.
                    first_read.insert(std::make_pair(attachment, is_read));
                    if (first[attachment] == VK_SUBPASS_EXTERNAL) first[attachment] = subpass;
                    last[attachment] = subpass;

                    if (layout != attachment_layout[attachment]) {
                        subpass_transitions[subpass].emplace_back(attachment, attachment_layout[attachment], layout);
                        // TODO: Determine if this simple-minded tracking is sufficient (it is for correct definitions)
                        attachment_layout[attachment] = layout;
                    }
                }
            }
        }
        void FinalTransitions() {
            auto &final_transitions = subpass_transitions[rp->createInfo.subpassCount];

            for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
                const auto final_layout = rp->createInfo.pAttachments[attachment].finalLayout;
                if (final_layout != attachment_layout[attachment]) {
                    final_transitions.emplace_back(attachment, attachment_layout[attachment], final_layout);
                }
            }
        }
    };
    AttachmentTracker attachment_tracker(render_pass);

    for (uint32_t subpass_index = 0; subpass_index < create_info->subpassCount; ++subpass_index) {
        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[subpass_index];
        attachment_tracker.Update(subpass_index, subpass.pColorAttachments, subpass.colorAttachmentCount, false);
        attachment_tracker.Update(subpass_index, subpass.pResolveAttachments, subpass.colorAttachmentCount, false);
        attachment_tracker.Update(subpass_index, subpass.pDepthStencilAttachment, 1, false);
        attachment_tracker.Update(subpass_index, subpass.pInputAttachments, subpass.inputAttachmentCount, true);
    }
    attachment_tracker.FinalTransitions();

    // Add implicit dependencies
    for (uint32_t attachment = 0; attachment < attachment_tracker.attachment_count; attachment++) {
        const auto first_use = attachment_tracker.first[attachment];
        if (first_use != VK_SUBPASS_EXTERNAL) {
            auto &subpass_dep = render_pass->subpass_dependencies[first_use];
            if (!subpass_dep.barrier_from_external) {
                // Add implicit from barrier
                subpass_dep.implicit_barrier_from_external.reset(
                    new VkSubpassDependency2(ImplicitDependencyFromExternal(first_use)));
                subpass_dep.barrier_from_external = subpass_dep.implicit_barrier_from_external.get();
            }
        }

        const auto last_use = attachment_tracker.last[attachment];
        if (last_use != VK_SUBPASS_EXTERNAL) {
            auto &subpass_dep = render_pass->subpass_dependencies[last_use];
            if (!render_pass->subpass_dependencies[last_use].barrier_to_external) {
                // Add implicit to barrier
                subpass_dep.implicit_barrier_to_external.reset(new VkSubpassDependency2(ImplicitDependencyToExternal(last_use)));
                subpass_dep.barrier_to_external = subpass_dep.implicit_barrier_to_external.get();
            }
        }
    }

    // render_pass is a reference to a shared_ptr; we must still move from it so that move assignment is invoked,
    // transferring ownership instead of bumping the atomic reference count.
    renderPassMap[*pRenderPass] = std::move(render_pass);
}
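
// Worked example (editor's note, hypothetical values): consider two subpasses where attachment 0 is a color
// attachment in subpass 0 (initialLayout UNDEFINED, reference layout COLOR_ATTACHMENT_OPTIMAL) and an input
// attachment in subpass 1 (reference layout SHADER_READ_ONLY_OPTIMAL, finalLayout PRESENT_SRC_KHR). The tracker
// records first[0] = 0, last[0] = 1, first_read[0] = false, and the transitions
//   subpass_transitions[0]: UNDEFINED -> COLOR_ATTACHMENT_OPTIMAL
//   subpass_transitions[1]: COLOR_ATTACHMENT_OPTIMAL -> SHADER_READ_ONLY_OPTIMAL
//   subpass_transitions[2]: SHADER_READ_ONLY_OPTIMAL -> PRESENT_SRC_KHR  (the extra EndRenderPass slot)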

// Style note:
// Passing the shared_ptr by non-const reference and moving from it exceeds the google style guide's recommended use of
// references, but intentionally transfers ownership out of the caller's pointer. This is clearer than passing a pointer
// to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy construction or assignment.
void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
}

void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                     const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                     VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                             VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}
void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer);
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(render_pass_state->cb_bindings,
                                VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
                                cb_state);

        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }

        cb_state->imagelessFramebufferAttachments.clear();
        auto attachment_info_struct = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
        if (attachment_info_struct) {
            for (uint32_t i = 0; i < attachment_info_struct->attachmentCount; i++) {
                IMAGE_VIEW_STATE *img_view_state = GetImageViewState(attachment_info_struct->pAttachments[i]);
                cb_state->imagelessFramebufferAttachments.push_back(img_view_state);
            }
        }
    }
}
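
// Caller-side sketch (editor's illustration, hypothetical handles, guarded out of the build): beginning a render
// pass with an imageless framebuffer, the chained-struct path that populates imagelessFramebufferAttachments above.
#if 0
VkImageView views[1] = {color_view};
VkRenderPassAttachmentBeginInfo attach_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, nullptr, 1, views};
VkRenderPassBeginInfo begin_info = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, &attach_begin};
begin_info.renderPass = render_pass;
begin_info.framebuffer = imageless_framebuffer;  // created with VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT
vkCmdBeginRenderPass(command_buffer, &begin_info, VK_SUBPASS_CONTENTS_INLINE);
#endif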

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
                                                                        uint32_t counterBufferCount,
                                                                        const VkBuffer *pCounterBuffers,
                                                                        const VkDeviceSize *pCounterBufferOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    cb_state->transform_feedback_active = true;
}

void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
                                                                      uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
                                                                      const VkDeviceSize *pCounterBufferOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    cb_state->transform_feedback_active = false;
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeSubpass++;
    cb_state->activeSubpassContents = contents;
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
    cb_state->imagelessFramebufferAttachments.clear();
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                             const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be
                // moved from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non-const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}

void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}

void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range = MemRange();
        mem_info->p_driver_data = nullptr;
    }
}

void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // An Android special image cannot get its VkSubresourceLayout until the image is bound to memory.
        // See: VUID-vkGetImageSubresourceLayout-image-01895
        image_state->fragment_encoder =
            std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
            AddAliasingImage(image_state);
        }
    }
}
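
// Caller-side sketch (editor's illustration, hypothetical handles, guarded out of the build): binding an image to a
// swapchain's memory via a chained VkBindImageMemorySwapchainInfoKHR -- the path above that sets bind_swapchain.
#if 0
VkBindImageMemorySwapchainInfoKHR swapchain_bind = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, nullptr,
                                                    swapchain, 0 /*imageIndex*/};
VkBindImageMemoryInfo bind_info = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, &swapchain_bind, image,
                                   VK_NULL_HANDLE /*memory comes from the swapchain*/, 0 /*memoryOffset*/};
vkBindImageMemory2(device, 1, &bind_info);
#endif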

void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
                                                           VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkBindImageMemoryInfo bindInfo = {};
    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindInfo.image = image;
    bindInfo.memory = mem;
    bindInfo.memoryOffset = memoryOffset;
    UpdateBindImageMemoryState(bindInfo);
}

void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
    auto event_state = GetEventState(event);
    if (event_state) {
        event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
    }
}

void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
        semaphore_state->scope = kSyncScopeExternalPermanent;
    }
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif

void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}

void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
                                                    VkFenceImportFlagsKHR flags) {
    FENCE_STATE *fence_node = GetFenceState(fence);
    if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
            fence_node->scope == kSyncScopeInternal) {
            fence_node->scope = kSyncScopeExternalTemporary;
        } else {
            fence_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
    FENCE_STATE *fence_state = GetFenceState(fence);
    if (fence_state) {
        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
            // Export with reference transference becomes external
            fence_state->scope = kSyncScopeExternalPermanent;
        } else if (fence_state->scope == kSyncScopeInternal) {
            // Export with copy transference has a side effect of resetting the fence
            fence_state->state = FENCE_UNSIGNALED;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}

void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
    if (VK_SUCCESS != result) return;
    eventMap[*pEvent].write_in_use = 0;
    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}

void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
}
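
// Caller-side sketch (editor's illustration, hypothetical handles, guarded out of the build): recreating a swapchain.
// Whatever vkCreateSwapchainKHR returns, the spec retires oldSwapchain, which is why the helper above marks
// old_swapchain_state->retired unconditionally.
#if 0
VkSwapchainCreateInfoKHR create_info = {VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR};
create_info.surface = surface;
create_info.oldSwapchain = old_swapchain;  // retired after the call, success or failure
VkSwapchainKHR new_swapchain = VK_NULL_HANDLE;
VkResult res = vkCreateSwapchainKHR(device, &create_info, nullptr, &new_swapchain);
#endif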

void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}

void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}

void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed -- but if the app does that,
        // it's confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP
    // (and its semaphore waits) /never/ participate in any completion proof.
}

void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
    if (pCreateInfos) {
        for (uint32_t i = 0; i < swapchainCount; i++) {
            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
        }
    }
}

void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}

void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}
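
// Caller-side sketch (editor's illustration, hypothetical handles, guarded out of the build): an acquire may pass both
// a semaphore and a fence; the tracker treats the semaphore as signaled and the fence as in-flight because either may
// legally be waited on after a successful (or VK_SUBOPTIMAL_KHR) acquire.
#if 0
uint32_t image_index = 0;
vkAcquireNextImageKHR(device, swapchain, UINT64_MAX, acquire_semaphore, acquire_fence, &image_index);
#endif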

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
    if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
            phys_device_state.phys_device = pPhysicalDevices[i];
            // Init actual features for each physical device
            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
        }
    }
}

// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}
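
// Caller-side sketch (editor's illustration, guarded out of the build): the count/data two-call idiom this helper
// tracks. A null data pointer advances the recorded state to QUERY_COUNT; a second call with storage advances it to
// QUERY_DETAILS.
#if 0
uint32_t count = 0;
vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &count, nullptr);       // records QUERY_COUNT
std::vector<VkQueueFamilyProperties> props(count);
vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &count, props.data());  // records QUERY_DETAILS
#endif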

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!surface) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->destroyed = true;
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_METAL_EXT
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_METAL_EXT

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
                                                                    const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
                                                                    const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                    VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    // Reset the features2 safe struct before setting up the features field.
    physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
    physical_device_state->features2.features = *pFeatures;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pSurfaceFormatCount) {
        if (physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
        }
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
                                                                                              uint32_t queueFamilyIndex,
                                                                                              uint32_t *pCounterCount,
                                                                                              VkPerformanceCounterKHR *pCounters) {
    if (NULL == pCounters) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);

    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
    queueFamilyCounters->counters.resize(*pCounterCount);
    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];

    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}

void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}

void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}
5155
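// Display plane property queries follow the standard Vulkan two-call enumeration idiom: a
// count-only call advances the tracked state to QUERY_COUNT, and a call with a non-null
// properties array advances it to QUERY_DETAILS, letting later validation flag use of data
// that was never queried. Application-side pattern (sketch):
//
//     uint32_t count = 0;
//     vkGetPhysicalDeviceDisplayPlanePropertiesKHR(gpu, &count, nullptr);       // -> QUERY_COUNT
//     std::vector<VkDisplayPlanePropertiesKHR> props(count);
//     vkGetPhysicalDeviceDisplayPlanePropertiesKHR(gpu, &count, props.data());  // -> QUERY_DETAILS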
void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

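// Sampler Ycbcr conversion state caches the conversion's format, chroma filter, and potential
// format features so later sampler/image validation need not re-derive them. For Android
// external formats the create-info format is VK_FORMAT_UNDEFINED and the format features are
// filled in from the AHardwareBuffer by the ANDROID helper instead.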
void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
    }

    const VkFormat conversion_format = create_info->format;

    if (conversion_format != VK_FORMAT_UNDEFINED) {
        // If format is VK_FORMAT_UNDEFINED, it will be set by the external AHB features instead
        ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
    }

    ycbcr_state->chromaFilter = create_info->chromaFilter;
    ycbcr_state->format = conversion_format;
    samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
    }

    auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
    if (!ycbcr_state) return;  // Guard against unknown/already-destroyed handles
    ycbcr_state->destroyed = true;
    samplerYcbcrConversionMap.erase(ycbcr_conversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}

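// Host query reset (VK_EXT_host_query_reset, promoted to core in Vulkan 1.2) lets the CPU reset
// queries without recording a command buffer, so the tracker flips the affected slots back to
// QUERYSTATE_RESET here. Performance queries (VK_KHR_performance_query) are submitted once per
// counter pass, hence the extra per-pass reset loop. Application-side usage (sketch; the
// hostQueryReset feature must have been enabled at device creation):
//
//     vkResetQueryPool(device, query_pool, /*firstQuery*/ 0, /*queryCount*/ 4);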
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_obj.perf_pass = passIndex;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during the ValidateAllocateDescriptorSets() call
    }
}

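// Pool bookkeeping: a successful allocation debits availableSets by the number of sets and
// debits the per-type descriptor counters computed in UpdateAllocateDescriptorSetsData(). The
// optional VkDescriptorSetVariableDescriptorCountAllocateInfoEXT chain entry supplies per-set
// variable descriptor counts, and is honored only when its descriptorSetCount matches the
// allocation's.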
// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
                                                                       variable_count, this);
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}

// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
                                                            VkPipelineBindPoint bind_point) {
    UpdateDrawState(cb_state, cmd_type, bind_point);
    cb_state->hasDispatchCmd = true;  // Note: draw commands also route through here, so this is set for both kinds
}

// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point) {
    UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point);
    cb_state->hasDrawCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                                   uint32_t firstVertex, uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                                          uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                           uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

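// The indirect "count" draw variants read both the draw-parameter buffer and a separate count
// buffer on the GPU, so both buffers are bound to the command buffer's lifetime tracking.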
void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                        VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                        uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                      VkDeviceSize offset, VkBuffer countBuffer,
                                                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                      uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}

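// A shader module blob is treated as SPIR-V only when it begins with the SPIR-V magic number
// (spv::MagicNumber, 0x07230203); anything else gets an empty placeholder SHADER_MODULE_STATE
// so downstream pipeline validation can skip it gracefully.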
void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}

void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module || !module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline
    for (auto use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
        auto &reqs = pipeline->active_slots[slot][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
    }
}

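// The command buffer keeps a zero-filled shadow copy of push constant data, sized to the
// furthest byte any range in the layout can touch. For example, a layout with ranges
// {offset 0, size 16} and {offset 64, size 8} needs max(0 + 16, 64 + 8) = 72 bytes. The cache
// is rebuilt whenever the bound layout's ranges differ, so stale bytes from an incompatible
// layout never leak into validation.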
void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}

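// Swapchain images are created by the implementation rather than through vkCreateImage, so the
// tracker synthesizes a plausible VkImageCreateInfo from the swapchain create info (2D, single
// mip, optimal tiling) to give them the same IMAGE_STATE representation as ordinary images.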
void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;  // to be set later
            image_ci.flags = 0;        // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);

            AddImageStateProps(*image_state, device, physical_device);
        }
    }

    if (*pSwapchainImageCount) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
        }
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}

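// Acceleration structure builds: each destination structure is marked built and its build info
// snapshotted for later compatibility checks, and both source and destination structures are
// bound to the command buffer so their lifetimes are validated against submission.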
void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureKHR(
    VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
    const VkAccelerationStructureBuildOffsetInfoKHR *const *ppOffsetInfos) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state == nullptr) {
        return;
    }
    for (uint32_t i = 0; i < infoCount; ++i) {
        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfos[i].dstAccelerationStructure);
        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfos[i].srcAccelerationStructure);
        if (dst_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info_khr.initialize(&pInfos[i]);  // snapshot this structure's own build info, not pInfos[0]
            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
        }
        if (src_as_state != nullptr) {
            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
        }
    }
    cb_state->hasBuildAccelerationStructureCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
                                                                           const VkCopyAccelerationStructureInfoKHR *pInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfo->src);
        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfo->dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info_khr = src_as_state->build_info_khr;
            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
        }
    }
}