/* Copyright (c) 2015-2020 The Khronos Group Inc.
 * Copyright (c) 2015-2020 Valve Corporation
 * Copyright (c) 2015-2020 LunarG, Inc.
 * Copyright (C) 2015-2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

#include <algorithm>
#include <cmath>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

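// Link this device-level tracker back to its instance-level tracker, then defer to the base class initialization.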
void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->external_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            is_node->format_features = it->second;
        }
    }
}

void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
    const VkExternalMemoryBufferCreateInfo *embci = lvl_find_in_chain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
    if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        bs_node->external_ahb = true;
    }
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            ycbcr_state->format_features = it->second;
        }
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

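// Cache the format features the driver reports for an AHB external format, so that later image and sampler
// Ycbcr conversion creation can look them up by externalFormat (see the ahb_ext_formats_map lookups above).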
void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
    if (ahb_format_props) {
        ahb_ext_formats_map.insert({ahb_format_props->externalFormat, ahb_format_props->formatFeatures});
    }
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

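// Return the DescriptorSetLayout state at the given set index of a pipeline layout, or null if the index is out of range.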
std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
                                                                                     uint32_t set) {
    std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
    if (layout_data && (set < layout_data->set_layouts.size())) {
        dsl = layout_data->set_layouts[set];
    }
    return dsl;
}

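// Resolve and cache the format features supported for an image, taking DRM format modifier tiling into account.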
void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
    // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
    // if format is AHB external format then the features are already set
    if (image_state.has_ahb_format == false) {
        const VkImageTiling image_tiling = image_state.createInfo.tiling;
        const VkFormat image_format = image_state.createInfo.format;
        if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
            VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
                VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
            DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image, &drm_format_properties);

            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);

            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                // DRM format modifiers are opaque 64-bit IDs, not bitmasks, so match on equality rather than a bitwise AND
                if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
                    drm_format_properties.drmFormatModifier) {
                    image_state.format_features |=
                        drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                }
            }
        } else {
            VkFormatProperties format_properties;
            DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
            image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                   : format_properties.optimalTilingFeatures;
        }
    }
}

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(*pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
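            // Disjoint multi-planar images are bound to memory per plane, so query the requirements of each
            // plane separately via VkImagePlaneMemoryRequirementsInfo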
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least first plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    AddImageStateProps(*is_node, device, physical_device);

    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}

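// Tear down image state: invalidate any bound command buffers, release memory ranges, bindings, and aliasing
// links, then remove the image from imageMap.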
void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image, mem_binding);
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
    }
    // Get the memory requirements up front in case the app does not query them
    // External AHB memory can't be queried until after memory is bound
    if (buffer_state->external_ahb == false) {
        DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);
    }

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);

    // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
    const VkImageTiling image_tiling = image_state->createInfo.tiling;
    const VkFormat image_view_format = pCreateInfo->format;
    if (image_state->has_ahb_format == true) {
        // The image view inherits the image's format features since they share the same AHB
        image_view_state->format_features = image_state->format_features;
    } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
        assert(device_extensions.vk_ext_image_drm_format_modifier);
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                       nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            // DRM format modifiers are opaque 64-bit IDs, not bitmasks, so match on equality rather than a bitwise AND
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
                image_view_state->format_features |=
                    drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
            }
        }
    } else {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
        image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                     : format_properties.optimalTilingFeatures;
    }

    imageViewMap.insert(std::make_pair(*pView, std::move(image_view_state)));
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        RemoveBufferMemoryRange(buffer, mem_binding);
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(CMD_BUFFER_STATE *cb, FRAMEBUFFER_STATE *framebuffer,
                                                                      uint32_t index) {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
        assert(index < cb->imagelessFramebufferAttachments.size());
        return cb->imagelessFramebufferAttachments[index];
    }
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const CMD_BUFFER_STATE *cb,
                                                                            const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
        assert(index < cb->imagelessFramebufferAttachments.size());
        return cb->imagelessFramebufferAttachments[index];
    }
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

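// Record mutual aliasing links between this image and any compatible images bound to the same memory
// (or to the same swapchain image).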
void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        if (image_state->binding.mem_state) {
            bound_images = &image_state->binding.mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // A one-way clear is sufficient: because bound_images contains the cross references, a single pass
    // over it clears every aliasing reference, so no second pass is needed.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}

const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(mem_binding->cb_bindings,
                                        VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled[command_buffer_state]) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
            }
        }
    }
}

// SetMemBinding is used to establish an immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            mem_binding->binding.size = mem_binding->requirements.size;
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}

// SetSparseMemBinding adds a sparse binding between an object and a memory object:
// For the NULL-memory case, any previous binding is simply cleared. Otherwise, a reference
// to the memory is added to the object's sparse binding set, and a reference to the object
// is added to the memory object's binding list.
// Note: the returned skip value is currently always VK_FALSE.
bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
                                                 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
    bool skip = VK_FALSE;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
            if (binding.mem_state) {
                binding.mem_state->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}

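// Update draw-time bound state for this command buffer. For each descriptor set used by the bound pipeline,
// descriptor bindings are re-recorded only when the set, its contents, or the set of required bindings has
// changed since the last validation pass.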
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[setIndex].validated_set != descriptor_set ||
                state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                              state.per_set[setIndex].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                        state.per_set[setIndex].validated_set_binding_req_map.end(),
                                        std::inserter(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, pPipe, delta_reqs);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, pPipe, binding_req_map);
                }

                state.per_set[setIndex].validated_set = descriptor_set;
                state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

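// Return the union of the format features a format could have under any tiling: linear, optimal, and
// (when VK_EXT_image_drm_format_modifier is enabled) all DRM format modifier tiling features.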
VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
    VkFormatFeatureFlags format_features = 0;

    if (format != VK_FORMAT_UNDEFINED) {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
        format_features |= format_properties.linearTilingFeatures;
        format_features |= format_properties.optimalTilingFeatures;
        if (device_extensions.vk_ext_image_drm_format_modifier) {
            // VK_KHR_get_physical_device_properties2 is required in this case
            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
            }
        }
    }

    return format_features;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
// Add object_binding to cmd buffer
// Add cb_binding to object
bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled[command_buffer_state]) {
        return false;
    }
    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
    auto inserted = cb_bindings.insert({cb_node, -1});
    if (inserted.second) {
        cb_node->object_bindings.push_back(obj);
        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
        return true;
    }
    return false;
}

// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

1021// Reset the command buffer state
1022// Maintain the createInfo and set state to CB_NEW, but clear all other state
1023void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
1024 CMD_BUFFER_STATE *pCB = GetCBState(cb);
1025 if (pCB) {
1026 pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        pCB->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        pCB->small_indexed_draw_call_count = 0;

        pCB->transform_feedback_active = false;
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

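// Capture the feature and property state of the newly created device. The enabled feature set is assembled from
// pEnabledFeatures (or a chained VkPhysicalDeviceFeatures2), the Vulkan 1.1/1.2 aggregate feature structs, and the
// per-extension feature structs found in the pNext chain. As an illustration, hypothetical application-side code (not
// part of this layer) whose features this function would record might look like:
//
//     VkPhysicalDeviceVulkan12Features features12 = {};
//     features12.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
//     features12.timelineSemaphore = VK_TRUE;
//     VkDeviceCreateInfo create_info = {};
//     create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
//     create_info.pNext = &features12;  // parsed below via lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>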
void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);
        }
    }

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously retrieved them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
    if (vulkan_12_features) {
        state_tracker->enabled_features.core12 = *vulkan_12_features;
    } else {
        // Set extension feature aliases to false, as there is no struct to check
        state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
        state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
        state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
        state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;

        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan12Features

        const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
        if (eight_bit_storage_features) {
            state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
            state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
                eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
            state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
        }

        const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
        if (float16_int8_features) {
            state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
            state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
        }

        const auto *descriptor_indexing_features =
            lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
        if (descriptor_indexing_features) {
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
                descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
            state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
                descriptor_indexing_features->descriptorBindingPartiallyBound;
            state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
                descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
            state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
        }

        const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
        if (scalar_block_layout_features) {
            state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
        }

        const auto *imageless_framebuffer_features =
            lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
        if (imageless_framebuffer_features) {
            state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
        }

        const auto *uniform_buffer_standard_layout_features =
            lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
        if (uniform_buffer_standard_layout_features) {
            state_tracker->enabled_features.core12.uniformBufferStandardLayout =
                uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
        }

        const auto *subgroup_extended_types_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
        if (subgroup_extended_types_features) {
            state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
                subgroup_extended_types_features->shaderSubgroupExtendedTypes;
        }

        const auto *separate_depth_stencil_layouts_features =
            lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
        if (separate_depth_stencil_layouts_features) {
            state_tracker->enabled_features.core12.separateDepthStencilLayouts =
                separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
        }

        const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
        if (host_query_reset_features) {
            state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
        }

        const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
        if (timeline_semaphore_features) {
            state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
        }

        const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
        if (buffer_device_address) {
            state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
            state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
                buffer_device_address->bufferDeviceAddressCaptureReplay;
            state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
                buffer_device_address->bufferDeviceAddressMultiDevice;
        }
    }

    const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
    if (vulkan_11_features) {
        state_tracker->enabled_features.core11 = *vulkan_11_features;
    } else {
        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan11Features

        const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
        if (sixteen_bit_storage_features) {
            state_tracker->enabled_features.core11.storageBuffer16BitAccess =
                sixteen_bit_storage_features->storageBuffer16BitAccess;
            state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
                sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
            state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
            state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
        }

        const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
        if (multiview_features) {
            state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
            state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
            state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
        }

        const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
        if (variable_pointers_features) {
            state_tracker->enabled_features.core11.variablePointersStorageBuffer =
                variable_pointers_features->variablePointersStorageBuffer;
            state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
        }

        const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
        if (protected_memory_features) {
            state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
        }

        const auto *ycbcr_conversion_features =
            lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
        if (ycbcr_conversion_features) {
            state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
        }

        const auto *shader_draw_parameters_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
        if (shader_draw_parameters_features) {
            state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
        }
    }

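    // Everything below is independent of the aggregate 1.1/1.2 feature handling above: the device-group physical
    // device count and each NV/EXT/KHR/AMD extension feature struct are read directly from the pNext chain whenever
    // present.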
    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_device_address_ext) {
        state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    const auto *dedicated_allocation_image_aliasing_features =
        lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
    if (dedicated_allocation_image_aliasing_features) {
        state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
            *dedicated_allocation_image_aliasing_features;
    }

    const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
    if (performance_query_features) {
        state_tracker->enabled_features.performance_query_features = *performance_query_features;
    }

    const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
    if (device_coherent_memory_features) {
        state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
    }

    const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
    if (ycbcr_image_array_features) {
        state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
    }

    const auto *ray_tracing_features = lvl_find_in_chain<VkPhysicalDeviceRayTracingFeaturesKHR>(pCreateInfo->pNext);
    if (ray_tracing_features) {
        state_tracker->enabled_features.ray_tracing_features = *ray_tracing_features;
    }

    const auto *robustness2_features = lvl_find_in_chain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
    if (robustness2_features) {
        state_tracker->enabled_features.robustness2_features = *robustness2_features;
    }

    const auto *fragment_density_map_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_density_map_features) {
        state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
    }

    const auto *custom_border_color_features = lvl_find_in_chain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
    if (custom_border_color_features) {
        state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
    }

    const auto *pipeline_creation_cache_control_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
    if (pipeline_creation_cache_control_features) {
        state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
    }

    // Store physical device properties and physical device mem limits into the state tracker's structs
    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core11);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core12);

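    // The GetPhysicalDeviceExtProperties calls below all follow one pattern (a summary of the helper's presumed
    // behavior): if the given extension (or the 1.2 feature rollup) is enabled, query the matching properties struct
    // through a chained VkPhysicalDeviceProperties2 call and cache it on the state tracker; otherwise the cached
    // struct keeps its zero-initialized defaults.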
    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
        VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
        state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
            descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
        state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
            descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
        state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
        state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
            descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
        VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
        state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
        state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
            depth_stencil_resolve_props.supportedStencilResolveModes;
        state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
        state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing, &phys_dev_props->ray_tracing_propsKHR);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
        VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
        state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
            timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
    }

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
        VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
        state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
        state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
    }

    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
                                                                               state_tracker->cooperative_matrix_properties.data());
    }
    if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
        state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
        state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
        state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
    }

    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
            state_tracker->queue_family_index_map.insert(
                std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.queueCount));
            state_tracker->queue_family_create_flags_map.insert(
                std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.flags));
        }
    }
}

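// Device teardown: command buffers are reset first so their object_bindings are unlinked while the bound state still
// exists; only then are the owning maps cleared. Descriptor sets are destroyed through their pools, which also
// empties setMap.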
void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}

// Loop through bound objects and increment their in_use counts.
void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
    for (auto obj : cb_node->object_bindings) {
        auto base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_add(1);
        }
    }
}

// Track which resources are in-flight by atomically incrementing their "in_use" count
void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
    cb_node->submitCount++;
    cb_node->in_use.fetch_add(1);

    // First increment for all "generic" objects bound to the cmd buffer, followed by special-case objects below
    IncrementBoundObjects(cb_node);
    // TODO : We should be able to remove the null look-up checks from the code below once all the corresponding cases
    // are verified to cause CB_INVALID state, and that state is flagged prior to calling this function.
    for (auto event : cb_node->writeEventsBeforeWait) {
        auto event_state = GetEventState(event);
        if (event_state) event_state->write_in_use++;
    }
}

// Decrement in-use count for objects bound to command buffer
void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
    BASE_NODE *base_obj = nullptr;
    for (auto obj : cb_node->object_bindings) {
        base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_sub(1);
        }
    }
}

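// Retire all submissions on pQueue with sequence number <= seq. Each QUEUE_STATE pairs a monotonically increasing
// 'seq' counter with a deque of pending submissions, so retiring pops submissions off the front: semaphore and
// command buffer in_use counts are released, replayed query results are published to queryToStateMap, and the
// submission's fence (if internally scoped) is marked FENCE_RETIRED. A wait on another queue proves that queue
// reached the signaling seq, so those queues are recursively rolled forward as well.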
void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        for (auto &signal : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(signal.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
                if (pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && pSemaphore->payload < signal.payload) {
                    pSemaphore->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            QueryMap localQueryToStateMap;
            VkQueryPool first_pool = VK_NULL_HANDLE;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &localQueryToStateMap);
            }

            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
}

// Submit a fence to a queue, delimiting previous fences and previous untracked
// work by it.
static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
    pFence->state = FENCE_INFLIGHT;
    pFence->signaler.first = pQueue->queue;
    pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
}

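// Record the state effects of a successful vkQueueSubmit. Binary semaphore waits consume the signaler previously
// recorded on the semaphore, while timeline semaphore waits and signals carry 64-bit payloads taken from the chained
// VkTimelineSemaphoreSubmitInfoKHR. Each command buffer's deferred query/event updates are replayed here with
// validation disabled, so queryToStateMap and eventMap reflect the submitted work immediately.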
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early; we will not see the wait that corresponds to this signal.
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        vector<SEMAPHORE_WAIT> semaphore_waits;
        vector<SEMAPHORE_SIGNAL> semaphore_signals;
        vector<VkSemaphore> semaphore_externals;
        const uint64_t next_seq = pQueue->seq + pQueue->submissions.size() + 1;
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_WAIT wait;
                    wait.semaphore = semaphore;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                            wait.queue = pSemaphore->signaler.first;
                            wait.seq = pSemaphore->signaler.second;
                            semaphore_waits.push_back(wait);
                            pSemaphore->in_use.fetch_add(1);
                        }
                        pSemaphore->signaler.first = VK_NULL_HANDLE;
                        pSemaphore->signaled = false;
                    } else if (pSemaphore->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
                        wait.queue = queue;
                        wait.seq = next_seq;
                        wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
                        semaphore_waits.push_back(wait);
                        pSemaphore->in_use.fetch_add(1);
                    }
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = next_seq;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = next_seq;
                        pSemaphore->signaled = true;
                    } else {
                        signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early; we will not see the wait that corresponds to this signal.
                    early_retire_seq = std::max(early_retire_seq, next_seq);
                }
            }
        }
        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                VkQueryPool first_pool = VK_NULL_HANDLE;
                EventToStageMap localEventToStageMap;
                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, first_pool, perf_pass, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, perf_pass);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS == result) {
        AddMemObjInfo(device, *pMemory, pAllocateInfo);
    }
    return;
}

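// Freeing a VkDeviceMemory allocation has to sever every reference to it: clear the (sparse) bindings of any image,
// buffer, or acceleration structure bound to it, invalidate command buffers that recorded against it, break image
// aliasing links, and finally drop the allocation from memObjMap.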
void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    memObjMap.erase(mem);
}

void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early; we will not see the wait that corresponds to this signal.
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings; it needs updating to comprehend the full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);

                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = pSemaphore->signaler.second;
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early; we will not see the wait that corresponds to this signal.
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

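// New semaphores default to binary type with an empty signaler; a chained VkSemaphoreTypeCreateInfoKHR switches the
// tracked type and seeds the timeline payload. Illustrative application-side creation of a timeline semaphore
// (hypothetical code, shown only to document what is parsed below):
//
//     VkSemaphoreTypeCreateInfoKHR type_info = {};
//     type_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR;
//     type_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
//     type_info.initialValue = 0;
//     VkSemaphoreCreateInfo create_info = {};
//     create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
//     create_info.pNext = &type_info;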
2045void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
2046 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
2047 VkResult result) {
2048 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002049 auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002050 semaphore_state->signaler.first = VK_NULL_HANDLE;
2051 semaphore_state->signaler.second = 0;
2052 semaphore_state->signaled = false;
2053 semaphore_state->scope = kSyncScopeInternal;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002054 semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
2055 semaphore_state->payload = 0;
2056 auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
2057 if (semaphore_type_create_info) {
2058 semaphore_state->type = semaphore_type_create_info->semaphoreType;
2059 semaphore_state->payload = semaphore_type_create_info->initialValue;
2060 }
locke-lunargd556cc32019-09-17 01:21:23 -06002061 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
2062}
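
// A sketch of the app-side create chain parsed above (illustrative values; structures/enums are from
// VK_KHR_timeline_semaphore, promoted to core in Vulkan 1.2):
//   VkSemaphoreTypeCreateInfoKHR type_ci = {VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR};
//   type_ci.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
//   type_ci.initialValue = 0;
//   const VkSemaphoreCreateInfo ci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &type_ci, 0};
//   vkCreateSemaphore(device, &ci, nullptr, &semaphore);  // records type = TIMELINE, payload = 0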
2063
2064void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
2065 VkSemaphoreImportFlagsKHR flags) {
2066 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
2067 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR ||
             (flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR)) &&
            sema_node->scope == kSyncScopeInternal) {
2070 sema_node->scope = kSyncScopeExternalTemporary;
2071 } else {
2072 sema_node->scope = kSyncScopeExternalPermanent;
2073 }
2074 }
2075}
2076
void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
                                                              VkResult result) {
    if (VK_SUCCESS != result) return;
    auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
    if (pSemaphore) {
        pSemaphore->payload = pSignalInfo->value;
    }
}
2082
locke-lunargd556cc32019-09-17 01:21:23 -06002083void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
2084 auto mem_info = GetDevMemState(mem);
2085 if (mem_info) {
2086 mem_info->mapped_range.offset = offset;
2087 mem_info->mapped_range.size = size;
2088 mem_info->p_driver_data = *ppData;
2089 }
2090}
2091
2092void ValidationStateTracker::RetireFence(VkFence fence) {
2093 auto pFence = GetFenceState(fence);
2094 if (pFence && pFence->scope == kSyncScopeInternal) {
2095 if (pFence->signaler.first != VK_NULL_HANDLE) {
            // Fence signaler is a queue -- use this as proof that prior operations on that queue have completed.
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002097 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002098 } else {
            // Fence signaler is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
2100 // the fence as retired.
2101 pFence->state = FENCE_RETIRED;
2102 }
2103 }
2104}
2105
2106void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2107 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2108 if (VK_SUCCESS != result) return;
2109
2110 // When we know that all fences are complete we can clean/remove their CBs
2111 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2112 for (uint32_t i = 0; i < fenceCount; i++) {
2113 RetireFence(pFences[i]);
2114 }
2115 }
    // NOTE: The alternate case, where only some of the fences have completed, is not handled here. In
    // that case, for the app to guarantee which fences completed, it will have to call
    // vkGetFenceStatus(), at which point we'll clean/remove their CBs if complete.
2119}
2120
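// Retire all queue work up to the latest point at which <semaphore> was signaled with a payload <= until_payload:
// for each queue, find the highest submission seq that signals this timeline semaphore at or below the target
// value, then retire everything on that queue through that seq.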
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002121void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
2122 auto pSemaphore = GetSemaphoreState(semaphore);
2123 if (pSemaphore) {
2124 for (auto &pair : queueMap) {
2125 QUEUE_STATE &queueState = pair.second;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002126 uint64_t max_seq = 0;
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002127 for (const auto &submission : queueState.submissions) {
2128 for (const auto &signalSemaphore : submission.signalSemaphores) {
2129 if (signalSemaphore.semaphore == semaphore && signalSemaphore.payload <= until_payload) {
Tony-LunarG47d5e272020-04-07 15:35:55 -06002130 if (signalSemaphore.seq > max_seq) {
2131 max_seq = signalSemaphore.seq;
2132 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002133 }
2134 }
2135 }
Tony-LunarG47d5e272020-04-07 15:35:55 -06002136 if (max_seq) {
2137 RetireWorkOnQueue(&queueState, max_seq);
2138 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002139 }
2140 }
2141}
2142
John Zulauff89de662020-04-13 18:57:34 -06002143void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2144 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002145 if (VK_SUCCESS != result) return;
2146
2147 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2148 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2149 }
2150}
2151
John Zulauff89de662020-04-13 18:57:34 -06002152void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2153 VkResult result) {
2154 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2155}
2156
2157void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
2158 uint64_t timeout, VkResult result) {
2159 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2160}
2161
locke-lunargd556cc32019-09-17 01:21:23 -06002162void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2163 if (VK_SUCCESS != result) return;
2164 RetireFence(fence);
2165}
2166
2167void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
2168 // Add queue to tracking set only if it is new
2169 auto queue_is_new = queues.emplace(queue);
    if (queue_is_new.second) {
2171 QUEUE_STATE *queue_state = &queueMap[queue];
2172 queue_state->queue = queue;
2173 queue_state->queueFamilyIndex = queue_family_index;
2174 queue_state->seq = 0;
2175 }
2176}
2177
2178void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
2179 VkQueue *pQueue) {
2180 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
2181}
2182
2183void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
2184 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
2185}
2186
2187void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
2188 if (VK_SUCCESS != result) return;
2189 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002190 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002191}
2192
2193void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2194 if (VK_SUCCESS != result) return;
2195 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002196 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002197 }
2198}
2199
2200void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2201 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002202 auto fence_state = GetFenceState(fence);
2203 fence_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002204 fenceMap.erase(fence);
2205}
2206
2207void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2208 const VkAllocationCallbacks *pAllocator) {
2209 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002210 auto semaphore_state = GetSemaphoreState(semaphore);
2211 semaphore_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002212 semaphoreMap.erase(semaphore);
2213}
2214
2215void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2216 if (!event) return;
2217 EVENT_STATE *event_state = GetEventState(event);
2218 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
2219 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
2220 eventMap.erase(event);
2221}
2222
2223void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2224 const VkAllocationCallbacks *pAllocator) {
2225 if (!queryPool) return;
2226 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
2227 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
2228 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002229 qp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002230 queryPoolMap.erase(queryPool);
2231}
2232
// Object with given handle is being bound to memory w/ given mem_info struct.
// Track the binding by inserting the handle into mem_info's set of bound objects of that type;
// this state tracker only records the binding itself, not the bound range or any aliasing analysis.
locke-lunargd556cc32019-09-17 01:21:23 -06002237void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002238 VkDeviceSize memoryOffset) {
locke-lunargd556cc32019-09-17 01:21:23 -06002239 if (typed_handle.type == kVulkanObjectTypeImage) {
2240 mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
2241 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002242 mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002243 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002244 mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002245 } else {
2246 // Unsupported object type
2247 assert(false);
2248 }
2249}
2250
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002251void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
2252 InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002253}
2254
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002255void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
2256 InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002257}
2258
2259void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002260 VkDeviceSize mem_offset) {
2261 InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002262}
2263
2264// This function will remove the handle-to-index mapping from the appropriate map.
2265static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
2266 if (typed_handle.type == kVulkanObjectTypeImage) {
2267 mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
2268 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002269 mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002270 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002271 mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002272 } else {
2273 // Unsupported object type
2274 assert(false);
2275 }
2276}
2277
2278void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
2279 RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
2280}
2281
2282void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
2283 RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
2284}
2285
2286void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
2287 RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
2288}
2289
2290void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2291 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2292 if (buffer_state) {
2293 // Track bound memory range information
2294 auto mem_info = GetDevMemState(mem);
2295 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002296 InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06002297 }
2298 // Track objects tied to memory
2299 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
2300 }
2301}
2302
2303void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
2304 VkDeviceSize memoryOffset, VkResult result) {
2305 if (VK_SUCCESS != result) return;
2306 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
2307}
2308
2309void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
2310 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2311 for (uint32_t i = 0; i < bindInfoCount; i++) {
2312 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2313 }
2314}
2315
2316void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
2317 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2318 for (uint32_t i = 0; i < bindInfoCount; i++) {
2319 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2320 }
2321}
2322
Spencer Fricke6c127102020-04-16 06:25:20 -07002323void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002324 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2325 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002326 buffer_state->memory_requirements_checked = true;
2327 }
2328}
2329
2330void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
2331 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002332 RecordGetBufferMemoryRequirementsState(buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002333}
2334
2335void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
2336 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
2337 VkMemoryRequirements2KHR *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002338 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002339}
2340
2341void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
2342 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
2343 VkMemoryRequirements2KHR *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002344 RecordGetBufferMemoryRequirementsState(pInfo->buffer);
locke-lunargd556cc32019-09-17 01:21:23 -06002345}
2346
Spencer Fricke6c127102020-04-16 06:25:20 -07002347void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002348 const VkImagePlaneMemoryRequirementsInfo *plane_info =
2349 (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002350 IMAGE_STATE *image_state = GetImageState(image);
2351 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002352 if (plane_info != nullptr) {
2353 // Multi-plane image
2354 image_state->memory_requirements_checked = false; // Each image plane needs to be checked itself
2355 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
2356 image_state->plane0_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002357 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
2358 image_state->plane1_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002359 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
2360 image_state->plane2_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002361 }
2362 } else {
            // Single-plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002364 image_state->memory_requirements_checked = true;
2365 }
locke-lunargd556cc32019-09-17 01:21:23 -06002366 }
2367}
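
// A sketch of the app-side per-plane query recorded above (assumes a multi-planar, e.g. YCbCr, image;
// the structures are core in Vulkan 1.1 and have KHR aliases):
//   VkImagePlaneMemoryRequirementsInfo plane_info = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
//   plane_info.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
//   VkImageMemoryRequirementsInfo2 info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, &plane_info};
//   info.image = image;
//   VkMemoryRequirements2 mem_reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
//   vkGetImageMemoryRequirements2(device, &info, &mem_reqs);  // marks plane1_memory_requirements_checked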
2368
2369void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
2370 VkMemoryRequirements *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002371 RecordGetImageMemoryRequirementsState(image, nullptr);
locke-lunargd556cc32019-09-17 01:21:23 -06002372}
2373
2374void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2375 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002376 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002377}
2378
2379void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
2380 const VkImageMemoryRequirementsInfo2 *pInfo,
2381 VkMemoryRequirements2 *pMemoryRequirements) {
Spencer Fricke6c127102020-04-16 06:25:20 -07002382 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002383}
2384
2385static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2386 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2387 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2388 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2389 image_state->sparse_metadata_required = true;
2390 }
2391}
2392
2393void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2394 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2395 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2396 auto image_state = GetImageState(image);
2397 image_state->get_sparse_reqs_called = true;
2398 if (!pSparseMemoryRequirements) return;
2399 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2400 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2401 }
2402}
2403
2404void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
2405 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2406 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2407 auto image_state = GetImageState(pInfo->image);
2408 image_state->get_sparse_reqs_called = true;
2409 if (!pSparseMemoryRequirements) return;
2410 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2411 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2412 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2413 }
2414}
2415
2416void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
2417 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2418 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2419 auto image_state = GetImageState(pInfo->image);
2420 image_state->get_sparse_reqs_called = true;
2421 if (!pSparseMemoryRequirements) return;
2422 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2423 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2424 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2425 }
2426}
2427
2428void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2429 const VkAllocationCallbacks *pAllocator) {
2430 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002431 auto shader_module_state = GetShaderModuleState(shaderModule);
2432 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002433 shaderModuleMap.erase(shaderModule);
2434}
2435
2436void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2437 const VkAllocationCallbacks *pAllocator) {
2438 if (!pipeline) return;
2439 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
2440 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
2441 // Any bound cmd buffers are now invalid
2442 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002443 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002444 pipelineMap.erase(pipeline);
2445}
2446
2447void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2448 const VkAllocationCallbacks *pAllocator) {
2449 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002450 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
2451 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002452 pipelineLayoutMap.erase(pipelineLayout);
2453}
2454
2455void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2456 const VkAllocationCallbacks *pAllocator) {
2457 if (!sampler) return;
2458 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
2459 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
2460 // Any bound cmd buffers are now invalid
2461 if (sampler_state) {
2462 InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
Yuly Novikov424cdd52020-05-26 16:45:12 -04002463
2464 if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
2465 sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
2466 custom_border_color_sampler_count--;
2467 }
2468
2469 sampler_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002470 }
2471 samplerMap.erase(sampler);
2472}
2473
2474void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2475 const VkAllocationCallbacks *pAllocator) {
2476 if (!descriptorSetLayout) return;
2477 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2478 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002479 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002480 descriptorSetLayoutMap.erase(layout_it);
2481 }
2482}
2483
2484void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2485 const VkAllocationCallbacks *pAllocator) {
2486 if (!descriptorPool) return;
2487 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
2488 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
2489 if (desc_pool_state) {
2490 // Any bound cmd buffers are now invalid
2491 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
2492 // Free sets that were in this pool
2493 for (auto ds : desc_pool_state->sets) {
2494 FreeDescriptorSet(ds);
2495 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002496 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002497 descriptorPoolMap.erase(descriptorPool);
2498 }
2499}
2500
2501// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
2502void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
2503 const VkCommandBuffer *command_buffers) {
2504 for (uint32_t i = 0; i < command_buffer_count; i++) {
2505 auto cb_state = GetCBState(command_buffers[i]);
2506 // Remove references to command buffer's state and delete
2507 if (cb_state) {
2508 // reset prior to delete, removing various references to it.
2509 // TODO: fix this, it's insane.
2510 ResetCommandBufferState(cb_state->commandBuffer);
2511 // Remove the cb_state's references from COMMAND_POOL_STATEs
2512 pool_state->commandBuffers.erase(command_buffers[i]);
2513 // Remove the cb debug labels
2514 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
2515 // Remove CBState from CB map
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002516 cb_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002517 commandBufferMap.erase(cb_state->commandBuffer);
2518 }
2519 }
2520}
2521
2522void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2523 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
2524 auto pPool = GetCommandPoolState(commandPool);
2525 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
2526}
2527
2528void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2529 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2530 VkResult result) {
2531 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002532 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002533 cmd_pool_state->createFlags = pCreateInfo->flags;
2534 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
2535 commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
2536}
2537
2538void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2539 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2540 VkResult result) {
2541 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002542 auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002543 query_pool_state->createInfo = *pCreateInfo;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002544 query_pool_state->pool = *pQueryPool;
2545 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
2546 const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
2547 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
2548
2549 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2550 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2551 switch (counter.scope) {
2552 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
2553 query_pool_state->has_perf_scope_command_buffer = true;
2554 break;
2555 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
2556 query_pool_state->has_perf_scope_render_pass = true;
2557 break;
2558 default:
2559 break;
2560 }
2561 }
2562
2563 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
2564 &query_pool_state->n_performance_passes);
2565 }
2566
locke-lunargd556cc32019-09-17 01:21:23 -06002567 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
2568
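    // Every query in the new pool starts out in the unknown state until it is first reset.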
2569 QueryObject query_obj{*pQueryPool, 0u};
2570 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2571 query_obj.query = i;
2572 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2573 }
2574}
2575
2576void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2577 const VkAllocationCallbacks *pAllocator) {
2578 if (!commandPool) return;
2579 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2580 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2581 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2582 if (cp_state) {
2583 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2584 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2585 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002586 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002587 commandPoolMap.erase(commandPool);
2588 }
2589}
2590
2591void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2592 VkCommandPoolResetFlags flags, VkResult result) {
2593 if (VK_SUCCESS != result) return;
2594 // Reset all of the CBs allocated from this pool
2595 auto command_pool_state = GetCommandPoolState(commandPool);
2596 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2597 ResetCommandBufferState(cmdBuffer);
2598 }
2599}
2600
2601void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2602 VkResult result) {
2603 for (uint32_t i = 0; i < fenceCount; ++i) {
2604 auto pFence = GetFenceState(pFences[i]);
2605 if (pFence) {
2606 if (pFence->scope == kSyncScopeInternal) {
2607 pFence->state = FENCE_UNSIGNALED;
2608 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2609 pFence->scope = kSyncScopeInternal;
2610 }
2611 }
2612 }
2613}
2614
Jeff Bolzadbfa852019-10-04 13:53:30 -05002615// For given cb_nodes, invalidate them and track object causing invalidation.
2616// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
2617// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
2618// can also unlink objects from command buffers.
2619void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
2620 const VulkanTypedHandle &obj, bool unlink) {
2621 for (const auto &cb_node_pair : cb_nodes) {
2622 auto &cb_node = cb_node_pair.first;
2623 if (cb_node->state == CB_RECORDING) {
2624 cb_node->state = CB_INVALID_INCOMPLETE;
2625 } else if (cb_node->state == CB_RECORDED) {
2626 cb_node->state = CB_INVALID_COMPLETE;
2627 }
2628 cb_node->broken_bindings.push_back(obj);
2629
2630 // if secondary, then propagate the invalidation to the primaries that will call us.
2631 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
2632 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
2633 }
2634 if (unlink) {
2635 int index = cb_node_pair.second;
2636 assert(cb_node->object_bindings[index] == obj);
2637 cb_node->object_bindings[index] = VulkanTypedHandle();
2638 }
2639 }
2640 if (unlink) {
2641 cb_nodes.clear();
2642 }
2643}
2644
2645void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
2646 const VulkanTypedHandle &obj) {
locke-lunargd556cc32019-09-17 01:21:23 -06002647 for (auto cb_node : cb_nodes) {
2648 if (cb_node->state == CB_RECORDING) {
2649 cb_node->state = CB_INVALID_INCOMPLETE;
2650 } else if (cb_node->state == CB_RECORDED) {
2651 cb_node->state = CB_INVALID_COMPLETE;
2652 }
2653 cb_node->broken_bindings.push_back(obj);
2654
2655 // if secondary, then propagate the invalidation to the primaries that will call us.
2656 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05002657 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06002658 }
2659 }
2660}
2661
2662void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2663 const VkAllocationCallbacks *pAllocator) {
2664 if (!framebuffer) return;
2665 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2666 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2667 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002668 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002669 frameBufferMap.erase(framebuffer);
2670}
2671
2672void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2673 const VkAllocationCallbacks *pAllocator) {
2674 if (!renderPass) return;
2675 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2676 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2677 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002678 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002679 renderPassMap.erase(renderPass);
2680}
2681
2682void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2683 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2684 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002685 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002686 fence_state->fence = *pFence;
2687 fence_state->createInfo = *pCreateInfo;
2688 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2689 fenceMap[*pFence] = std::move(fence_state);
2690}
2691
2692bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2693 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2694 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002695 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002696 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2697 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2698 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2699 cgpl_state->pipe_state.reserve(count);
2700 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002701 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002702 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002703 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002704 }
2705 return false;
2706}
2707
2708void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2709 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2710 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2711 VkResult result, void *cgpl_state_data) {
2712 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2713 // This API may create pipelines regardless of the return value
2714 for (uint32_t i = 0; i < count; i++) {
2715 if (pPipelines[i] != VK_NULL_HANDLE) {
2716 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2717 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2718 }
2719 }
2720 cgpl_state->pipe_state.clear();
2721}
2722
2723bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2724 const VkComputePipelineCreateInfo *pCreateInfos,
2725 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002726 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002727 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2728 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2729 ccpl_state->pipe_state.reserve(count);
2730 for (uint32_t i = 0; i < count; i++) {
2731 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002732 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002733 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002734 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002735 }
2736 return false;
2737}
2738
2739void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2740 const VkComputePipelineCreateInfo *pCreateInfos,
2741 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2742 VkResult result, void *ccpl_state_data) {
2743 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2744
2745 // This API may create pipelines regardless of the return value
2746 for (uint32_t i = 0; i < count; i++) {
2747 if (pPipelines[i] != VK_NULL_HANDLE) {
2748 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2749 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2750 }
2751 }
2752 ccpl_state->pipe_state.clear();
2753}
2754
2755bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2756 uint32_t count,
2757 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2758 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002759 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002760 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2761 crtpl_state->pipe_state.reserve(count);
2762 for (uint32_t i = 0; i < count; i++) {
2763 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002764 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002765 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002766 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002767 }
2768 return false;
2769}
2770
2771void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2772 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2773 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2774 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2775 // This API may create pipelines regardless of the return value
2776 for (uint32_t i = 0; i < count; i++) {
2777 if (pPipelines[i] != VK_NULL_HANDLE) {
2778 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2779 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2780 }
2781 }
2782 crtpl_state->pipe_state.clear();
2783}
2784
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002785bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache,
2786 uint32_t count,
2787 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2788 const VkAllocationCallbacks *pAllocator,
2789 VkPipeline *pPipelines, void *crtpl_state_data) const {
2790 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2791 crtpl_state->pipe_state.reserve(count);
2792 for (uint32_t i = 0; i < count; i++) {
2793 // Create and initialize internal tracking data structure
2794 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
2795 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
2796 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
2797 }
2798 return false;
2799}
2800
2801void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
2802 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2803 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2804 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2805 // This API may create pipelines regardless of the return value
2806 for (uint32_t i = 0; i < count; i++) {
2807 if (pPipelines[i] != VK_NULL_HANDLE) {
2808 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2809 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2810 }
2811 }
2812 crtpl_state->pipe_state.clear();
2813}
2814
locke-lunargd556cc32019-09-17 01:21:23 -06002815void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2816 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2817 VkResult result) {
    if (VK_SUCCESS != result) return;
    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
    if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT || pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
        custom_border_color_sampler_count++;
    }
locke-lunargd556cc32019-09-17 01:21:23 -06002821}
2822
2823void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2824 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2825 const VkAllocationCallbacks *pAllocator,
2826 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2827 if (VK_SUCCESS != result) return;
2828 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2829}
2830
// Provides a repeatable sort order; not very useful for a "memory in range" search
2832struct PushConstantRangeCompare {
2833 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2834 if (lhs->offset == rhs->offset) {
2835 if (lhs->size == rhs->size) {
2836 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2837 return lhs->stageFlags < rhs->stageFlags;
2838 }
2839 // If the offsets are the same then sorting by the end of range is useful for validation
2840 return lhs->size < rhs->size;
2841 }
2842 return lhs->offset < rhs->offset;
2843 }
2844};
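
// Example of the resulting order: (offset=0, size=4) < (offset=0, size=8) < (offset=4, size=4),
// with stageFlags as the final tie-breaker when offset and size are equal.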
2845
2846static PushConstantRangesDict push_constant_ranges_dict;
2847
2848PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2849 if (!info->pPushConstantRanges) {
2850 // Hand back the empty entry (creating as needed)...
2851 return push_constant_ranges_dict.look_up(PushConstantRanges());
2852 }
2853
2854 // Sort the input ranges to ensure equivalent ranges map to the same id
2855 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2856 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2857 sorted.insert(info->pPushConstantRanges + i);
2858 }
2859
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07002860 PushConstantRanges ranges;
2861 ranges.reserve(sorted.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002862 for (const auto range : sorted) {
2863 ranges.emplace_back(*range);
2864 }
2865 return push_constant_ranges_dict.look_up(std::move(ranges));
2866}
2867
// Dictionary of canonical forms of the pipeline set layouts (lists of descriptor set layouts)
2869static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2870
2871// Dictionary of canonical form of the "compatible for set" records
2872static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2873
2874static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2875 const PipelineLayoutSetLayoutsId set_layouts_id) {
2876 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2877}
2878
2879void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2880 const VkAllocationCallbacks *pAllocator,
2881 VkPipelineLayout *pPipelineLayout, VkResult result) {
2882 if (VK_SUCCESS != result) return;
2883
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002884 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002885 pipeline_layout_state->layout = *pPipelineLayout;
2886 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2887 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2888 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002889 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002890 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2891 }
2892
2893 // Get canonical form IDs for the "compatible for set" contents
2894 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2895 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2896 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2897
    // Create table of "compatible for set N" canonical forms for trivial accept validation
2899 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2900 pipeline_layout_state->compat_for_set.emplace_back(
2901 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2902 }
2903 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2904}
2905
2906void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2907 const VkAllocationCallbacks *pAllocator,
2908 VkDescriptorPool *pDescriptorPool, VkResult result) {
2909 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002910 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002911}
2912
2913void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2914 VkDescriptorPoolResetFlags flags, VkResult result) {
2915 if (VK_SUCCESS != result) return;
2916 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
2917 // TODO: validate flags
    // For every set allocated from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
2919 for (auto ds : pPool->sets) {
2920 FreeDescriptorSet(ds);
2921 }
2922 pPool->sets.clear();
2923 // Reset available count for each type and available sets for this pool
2924 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
2925 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
2926 }
2927 pPool->availableSets = pPool->maxSets;
2928}
2929
2930bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2931 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002932 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002933 // Always update common data
2934 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2935 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2936 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2937
2938 return false;
2939}
2940
2941// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
2942void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2943 VkDescriptorSet *pDescriptorSets, VkResult result,
2944 void *ads_state_data) {
2945 if (VK_SUCCESS != result) return;
2946 // All the updates are contained in a single cvdescriptorset function
2947 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2948 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2949 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2950}
2951
2952void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2953 const VkDescriptorSet *pDescriptorSets) {
2954 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2955 // Update available descriptor sets in pool
2956 pool_state->availableSets += count;
2957
2958 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
2959 for (uint32_t i = 0; i < count; ++i) {
2960 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2961 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2962 uint32_t type_index = 0, descriptor_count = 0;
2963 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2964 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2965 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2966 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2967 }
2968 FreeDescriptorSet(descriptor_set);
2969 pool_state->sets.erase(descriptor_set);
2970 }
2971 }
2972}
2973
2974void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2975 const VkWriteDescriptorSet *pDescriptorWrites,
2976 uint32_t descriptorCopyCount,
2977 const VkCopyDescriptorSet *pDescriptorCopies) {
2978 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2979 pDescriptorCopies);
2980}
2981
2982void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2983 VkCommandBuffer *pCommandBuffer, VkResult result) {
2984 if (VK_SUCCESS != result) return;
Jeff Bolz6835fda2019-10-06 00:15:34 -05002985 auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002986 if (pPool) {
2987 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2988 // Add command buffer to its commandPool map
2989 pPool->commandBuffers.insert(pCommandBuffer[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002990 auto pCB = std::make_shared<CMD_BUFFER_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002991 pCB->createInfo = *pCreateInfo;
Jeff Bolz6835fda2019-10-06 00:15:34 -05002992 pCB->command_pool = pPool;
locke-lunargd556cc32019-09-17 01:21:23 -06002993 // Add command buffer to map
2994 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
2995 ResetCommandBufferState(pCommandBuffer[i]);
2996 }
2997 }
2998}
2999
3000// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
3001void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003002 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003003 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07003004 // If imageless fb, skip fb binding
3005 if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003006 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
3007 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003008 auto view_state = GetAttachmentImageViewState(cb_state, fb_state, attachment);
locke-lunargd556cc32019-09-17 01:21:23 -06003009 if (view_state) {
3010 AddCommandBufferBindingImageView(cb_state, view_state);
3011 }
3012 }
3013}
3014
3015void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
3016 const VkCommandBufferBeginInfo *pBeginInfo) {
3017 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3018 if (!cb_state) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003019 if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
3020 // Secondary Command Buffer
3021 const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
3022 if (pInfo) {
3023 if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
3024 assert(pInfo->renderPass);
3025 auto framebuffer = GetFramebufferState(pInfo->framebuffer);
3026 if (framebuffer) {
3027 // Connect this framebuffer and its children to this cmdBuffer
3028 AddFramebufferBinding(cb_state, framebuffer);
3029 }
3030 }
3031 }
3032 }
3033 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
3034 ResetCommandBufferState(commandBuffer);
3035 }
3036 // Set updated state here in case implicit reset occurs above
3037 cb_state->state = CB_RECORDING;
3038 cb_state->beginInfo = *pBeginInfo;
3039 if (cb_state->beginInfo.pInheritanceInfo) {
3040 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
3041 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command buffer and inheriting, update the items we should inherit.
3043 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
3044 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
3045 cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
3046 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
3047 cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
3048 cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
3049 }
3050 }
3051
3052 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
3053 if (chained_device_group_struct) {
3054 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
3055 } else {
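        // Without a VkDeviceGroupCommandBufferBeginInfo, the command buffer targets all physical devices in the group.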
3056 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
3057 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003058
3059 cb_state->performance_lock_acquired = performance_lock_acquired;
locke-lunargd556cc32019-09-17 01:21:23 -06003060}
3061
3062void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
3063 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3064 if (!cb_state) return;
    // Cached validation results apply only to a particular recording of a particular command buffer.
3066 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
3067 descriptor_set->ClearCachedValidation(cb_state);
3068 }
3069 cb_state->validated_descriptor_sets.clear();
3070 if (VK_SUCCESS == result) {
3071 cb_state->state = CB_RECORDED;
3072 }
3073}
3074
3075void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
3076 VkResult result) {
3077 if (VK_SUCCESS == result) {
3078 ResetCommandBufferState(commandBuffer);
3079 }
3080}
3081
CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
    // Initially assume everything is static state
    CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;

    if (ds) {
        for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
            switch (ds->pDynamicStates[i]) {
                case VK_DYNAMIC_STATE_LINE_WIDTH:
                    flags &= ~CBSTATUS_LINE_WIDTH_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BIAS:
                    flags &= ~CBSTATUS_DEPTH_BIAS_SET;
                    break;
                case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
                    flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
                    flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
                    flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
                    flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
                    flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
                    break;
                case VK_DYNAMIC_STATE_SCISSOR:
                    flags &= ~CBSTATUS_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT:
                    flags &= ~CBSTATUS_VIEWPORT_SET;
                    break;
                case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
                    flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
                    flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
                    break;
                case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
                    flags &= ~CBSTATUS_LINE_STIPPLE_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
                    flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
                    break;
                default:
                    break;
            }
        }
    }

    return flags;
}

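// Illustrative sketch (hypothetical pipeline setup): declaring a state dynamic clears its bit, so
// MakeStaticStateMask() for the create info below would return
// CBSTATUS_ALL_STATE_SET & ~(CBSTATUS_VIEWPORT_SET | CBSTATUS_SCISSOR_SET):
//
//   const VkDynamicState dynamic_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
//   VkPipelineDynamicStateCreateInfo dynamic_info = {VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO};
//   dynamic_info.dynamicStateCount = 2;
//   dynamic_info.pDynamicStates = dynamic_states;
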
// Validation cache:
// CV is the bottommost implementor of this extension. Don't pass calls down.
// Utility function to set collective state for pipeline
void SetPipelineState(PIPELINE_STATE *pPipe) {
    // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
    if (pPipe->graphicsPipelineCI.pColorBlendState) {
        for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
            if (VK_TRUE == pPipe->attachments[i].blendEnable) {
                if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
                    pPipe->blendConstantsEnabled = true;
                }
            }
        }
    }
    // Check if sample location is enabled
    if (pPipe->graphicsPipelineCI.pMultisampleState) {
        const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
            lvl_find_in_chain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
        if (sample_location_state != nullptr) {
            pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
        }
    }
}

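// Illustrative sketch (hypothetical blend state): any enabled attachment whose blend factors fall
// in the constant-color/alpha range makes the pipeline depend on blend constants, which is what
// the range checks above detect:
//
//   VkPipelineColorBlendAttachmentState att = {};
//   att.blendEnable = VK_TRUE;
//   att.srcColorBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;            // inside the checked range
//   att.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR;  // also inside the range
//   // SetPipelineState() would then set pPipe->blendConstantsEnabled = true.
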
void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
        cb_state->status &= ~cb_state->static_status;
        cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
        cb_state->status |= cb_state->static_status;
    }
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
    cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
    SetPipelineState(pipe_state);
    AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
}

void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                         uint32_t viewportCount, const VkViewport *pViewports) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_VIEWPORT_SET;
}

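// Worked example of the mask update above (hypothetical values): with firstViewport = 1 and
// viewportCount = 2, ((1u << 2) - 1u) << 1 == 0b110, i.e. viewports 1 and 2 are recorded as set
// for this command buffer.
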
void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
                                                                   uint32_t exclusiveScissorCount,
                                                                   const VkRect2D *pExclusiveScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
    // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
    cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
                                                                    VkImageLayout imageLayout) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    if (imageView != VK_NULL_HANDLE) {
        auto view_state = GetImageViewState(imageView);
        AddCommandBufferBindingImageView(cb_state, view_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                             uint32_t viewportCount,
                                                                             const VkShadingRatePaletteNV *pShadingRatePalettes) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
    // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
}

void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
                                                                         const VkAccelerationStructureCreateInfoNV *pCreateInfo,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkAccelerationStructureNV *pAccelerationStructure,
                                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
                                                         &as_state->build_scratch_memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
                                                         &as_state->update_scratch_memory_requirements);
    as_state->allocator = pAllocator;
    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}

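// Illustrative sketch (an assumed application-side query): the tracker pre-queries the same
// requirements an application would request to size its backing allocation, e.g.:
//
//   VkAccelerationStructureMemoryRequirementsInfoNV info = {
//       VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV};
//   info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
//   info.accelerationStructure = as;  // hypothetical handle
//   VkMemoryRequirements2KHR reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
//   vkGetAccelerationStructureMemoryRequirementsNV(device, &info, &reqs);
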
void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
                                                                          const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
                                                                          const VkAllocationCallbacks *pAllocator,
                                                                          VkAccelerationStructureKHR *pAccelerationStructure,
                                                                          VkResult result) {
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoKHR as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR;
    as_memory_requirements_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &as_memory_requirements_info, &as_state->memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoKHR scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR;
    scratch_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &scratch_memory_req_info,
                                                          &as_state->build_scratch_memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoKHR update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR;
    update_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &update_memory_req_info,
                                                          &as_state->update_scratch_memory_requirements);
    as_state->allocator = pAllocator;
    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}

void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
    VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
    ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
    if (as_state != nullptr) {
        if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
            as_state->memory_requirements = *pMemoryRequirements;
            as_state->memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
            as_state->build_scratch_memory_requirements = *pMemoryRequirements;
            as_state->build_scratch_memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
            as_state->update_scratch_memory_requirements = *pMemoryRequirements;
            as_state->update_scratch_memory_requirements_checked = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryCommon(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result,
    bool isNV) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        const VkBindAccelerationStructureMemoryInfoKHR &info = pBindInfos[i];

        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
        if (as_state) {
            // Track bound memory range information
            auto mem_info = GetDevMemState(info.memory);
            if (mem_info) {
                InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
            }
            // Track objects tied to memory
            SetMemBinding(info.memory, as_state, info.memoryOffset,
                          VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR));

            // GPU validation of top level acceleration structure building needs acceleration structure handles.
            // XXX TODO: Query device address for KHR extension
            if (enabled[gpu_validation] && isNV) {
                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
    PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, true);
}

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryKHR(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result) {
    PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, false);
}

void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
    VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
    VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state == nullptr) {
        return;
    }

    ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
    ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
    if (dst_as_state != nullptr) {
        dst_as_state->built = true;
        dst_as_state->build_info.initialize(pInfo);
        AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
    }
    if (src_as_state != nullptr) {
        AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
    }
    cb_state->hasBuildAccelerationStructureCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
                                                                          VkAccelerationStructureNV dst,
                                                                          VkAccelerationStructureNV src,
                                                                          VkCopyAccelerationStructureModeNV mode) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info = src_as_state->build_info;
            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
                                                                          VkAccelerationStructureKHR accelerationStructure,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!accelerationStructure) return;
    auto *as_state = GetAccelerationStructureState(accelerationStructure);
    if (as_state) {
        const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
        InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
        for (auto mem_binding : as_state->GetBoundMemory()) {
            RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_binding);
        }
        ClearMemoryObjectBindings(obj_struct);
        as_state->destroyed = true;
        accelerationStructureMap.erase(accelerationStructure);
    }
}

void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
                                                                         VkAccelerationStructureNV accelerationStructure,
                                                                         const VkAllocationCallbacks *pAllocator) {
    PreCallRecordDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator);
}

void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                   uint32_t viewportCount,
                                                                   const VkViewportWScalingNV *pViewportWScalings) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
                                                               uint16_t lineStipplePattern) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
                                                          float depthBiasClamp, float depthBiasSlopeFactor) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
                                                        const VkRect2D *pScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
    cb_state->status |= CBSTATUS_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
                                                            float maxDepthBounds) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                   uint32_t compareMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t writeMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t reference) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
}

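// Illustrative sketch (assumed application-side recording): each vkCmdSet* entry point above
// simply ORs the matching status bit into cb_state->status, e.g.:
//
//   vkCmdSetLineWidth(cmd, 1.0f);                                         // CBSTATUS_LINE_WIDTH_SET
//   vkCmdSetStencilReference(cmd, VK_STENCIL_FACE_FRONT_AND_BACK, 0x01);  // CBSTATUS_STENCIL_REFERENCE_SET
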
// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" sets before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last; we'll shrink below, but first need to check for and clean up the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only clean up the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}

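// Illustrative sketch (hypothetical layouts and sets): binding sets 0..2 with layout_a, then
// rebinding set 1 with an incompatible layout_b, overwrites set 1 and leaves set 2 bound only if
// compat_for_set[2] still matches; otherwise the per_set array shrinks and set 2 must be re-bound
// before the next draw:
//
//   vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, layout_a, 0, 3, sets, 0, nullptr);
//   vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, layout_b, 1, 1, &set1_b, 0, nullptr);
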
// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                                                                VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                uint32_t firstSet, uint32_t setCount,
                                                                const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                                const uint32_t *pDynamicOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pipeline_layout = GetPipelineLayout(layout);

    // Resize binding arrays
    uint32_t last_set_index = firstSet + setCount - 1;
    if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
        cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
                                  dynamicOffsetCount, pDynamicOffsets);
    cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
}

void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set, clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
                                                                  VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                  uint32_t set, uint32_t descriptorWriteCount,
                                                                  const VkWriteDescriptorSet *pDescriptorWrites) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                                                            VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                                                            const void *pValues) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state != nullptr) {
        ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);

        auto &push_constant_data = cb_state->push_constant_data;
        assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
        std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
    }
}

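// Illustrative sketch (assumed application-side usage): the recorded bytes are shadowed into
// cb_state->push_constant_data at the given offset, e.g.:
//
//   const float color[4] = {1.0f, 0.0f, 0.0f, 1.0f};
//   vkCmdPushConstants(cmd, layout, VK_SHADER_STAGE_FRAGMENT_BIT, 0 /*offset*/, sizeof(color), color);
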
void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                             VkIndexType indexType) {
    auto buffer_state = GetBufferState(buffer);
    auto cb_state = GetCBState(commandBuffer);

    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->index_buffer_binding.buffer = buffer;
    cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
    cb_state->index_buffer_binding.offset = offset;
    cb_state->index_buffer_binding.index_type = indexType;
    // Add binding for this index buffer to this command buffer
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                               uint32_t bindingCount, const VkBuffer *pBuffers,
                                                               const VkDeviceSize *pOffsets) {
    auto cb_state = GetCBState(commandBuffer);

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer = pBuffers[i];
        vertex_buffer_binding.offset = pOffsets[i];
        // Add binding for this vertex buffer to this command buffer
        if (pBuffers[i]) {
            AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
        }
    }
}

void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
                                                           VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
}

bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
                                               EventToStageMap *localEventToStageMap) {
    (*localEventToStageMap)[event] = stageMask;
    return false;
}

void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}

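// Note on the mechanism above (a sketch, not additional behavior): the lambda is not evaluated at
// record time; it is replayed at queue-submit time against a local event-to-stage map, so the same
// command buffer can be submitted repeatedly and each submission resolves event state independently.
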
void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }

    cb_state->eventUpdates.emplace_back(
        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
        });
}

void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                        uint32_t bufferMemoryBarrierCount,
                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                        uint32_t imageMemoryBarrierCount,
                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    for (uint32_t i = 0; i < eventCount; ++i) {
        auto event_state = GetEventState(pEvents[i]);
        if (event_state) {
            AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
                                    cb_state);
        }
        cb_state->waitedEvents.insert(pEvents[i]);
        cb_state->events.push_back(pEvents[i]);
    }
}

bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
    (*localQueryToStateMap)[object] = value;
    return false;
}

bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
                                                QueryState value, QueryMap *localQueryToStateMap) {
    for (uint32_t i = 0; i < queryCount; i++) {
        QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
        (*localQueryToStateMap)[object] = value;
    }
    return false;
}

QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
                                                 uint32_t perfPass) const {
    QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);

    auto iter = localQueryToStateMap->find(query);
    if (iter != localQueryToStateMap->end()) return iter->second;

    return QUERYSTATE_UNKNOWN;
}

void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled[query_validation]) return;
    cb_state->activeQueries.insert(query_obj);
    cb_state->startedQueries.insert(query_obj);
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
        return false;
    });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
                                                         VkFlags flags) {
    if (disabled[query_validation]) return;
    QueryObject query = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query);
}

void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled[query_validation]) return;
    cb_state->activeQueries.erase(query_obj);
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
    if (disabled[query_validation]) return;
    QueryObject query_obj = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled[query_validation]) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
        QueryObject query = {queryPool, slot};
        cb_state->resetQueries.insert(query);
    }

    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, VkQueryPool &firstPerfQueryPool,
                                                                            uint32_t perfQueryPass,
                                                                            QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
                                                                   VkDeviceSize dstOffset, VkDeviceSize stride,
                                                                   VkQueryResultFlags flags) {
    if (disabled[query_validation]) return;
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buff_state = GetBufferState(dstBuffer);
    AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    if (disabled[query_validation]) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
                                                VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
}

void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            VkImageView view = pCreateInfo->pAttachments[i];
            auto view_state = GetImageViewState(view);
            if (!view_state) {
                continue;
            }
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}

void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
    }
}

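// Illustrative sketch (hypothetical two-subpass render pass): a dependency with srcSubpass = 0 and
// dstSubpass = 1 yields subpass_to_node[1].prev == {0} and subpass_to_node[0].next == {1}, while a
// dependency with srcSubpass == dstSubpass == 0 is instead recorded in self_dependencies[0].
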
static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
    if (index == VK_ATTACHMENT_UNUSED) return;

    if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
}

void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
                                                         VkRenderPass *pRenderPass) {
    render_pass->renderPass = *pRenderPass;
    auto create_info = render_pass->createInfo.ptr();

    RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());

    for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);

            // Resolve attachments are considered to be written
            if (subpass.pResolveAttachments) {
                MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
            }
        }
        if (subpass.pDepthStencilAttachment) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
        }
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
        }
    }

    // Even though render_pass is passed by reference, we still must move so that move assignment is invoked.
    renderPassMap[*pRenderPass] = std::move(render_pass);
}

// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in the google style guide, but intentionally forces the caller
// to move or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr
// copy construction or assignment.
void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
}

void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                     const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                     VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                             VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer);
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(render_pass_state->cb_bindings,
                                VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
                                cb_state);

        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }

        cb_state->imagelessFramebufferAttachments.clear();
        auto attachment_info_struct = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
        if (attachment_info_struct) {
            for (uint32_t i = 0; i < attachment_info_struct->attachmentCount; i++) {
                IMAGE_VIEW_STATE *img_view_state = GetImageViewState(attachment_info_struct->pAttachments[i]);
                cb_state->imagelessFramebufferAttachments.push_back(img_view_state);
            }
        }
    }
}

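// Illustrative sketch (assumed application-side usage with an imageless framebuffer): the views
// chained through VkRenderPassAttachmentBeginInfo are what populate
// imagelessFramebufferAttachments above:
//
//   VkRenderPassAttachmentBeginInfo attach_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO};
//   attach_begin.attachmentCount = 1;
//   attach_begin.pAttachments = &color_view;  // hypothetical VkImageView
//   VkRenderPassBeginInfo rp_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO};
//   rp_begin.pNext = &attach_begin;
//   // ... renderPass, framebuffer, renderArea, clear values ...
//   vkCmdBeginRenderPass(cmd, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
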
void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
                                                                        uint32_t counterBufferCount,
                                                                        const VkBuffer *pCounterBuffers,
                                                                        const VkDeviceSize *pCounterBufferOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    cb_state->transform_feedback_active = true;
}

void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
                                                                      uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
                                                                      const VkDeviceSize *pCounterBufferOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    cb_state->transform_feedback_active = false;
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeSubpass++;
    cb_state->activeSubpassContents = contents;
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
    cb_state->imagelessFramebufferAttachments.clear();
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                             const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

locke-lunargd556cc32019-09-17 01:21:23 -06004072void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
4073 const VkCommandBuffer *pCommandBuffers) {
4074 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4075
4076 CMD_BUFFER_STATE *sub_cb_state = NULL;
4077 for (uint32_t i = 0; i < commandBuffersCount; i++) {
4078 sub_cb_state = GetCBState(pCommandBuffers[i]);
4079 assert(sub_cb_state);
4080 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
4081 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
4082 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
4083 // from the validation step to the recording step
4084 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
4085 }
4086 }
4087
4088 // Propagate inital layout and current layout state to the primary cmd buffer
4089 // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
4090 // ValidationStateTracker these maps will be empty, so leaving the propagation in the the state tracker should be a no-op
4091 // for those other classes.
4092 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
4093 const auto image = sub_layout_map_entry.first;
4094 const auto *image_state = GetImageState(image);
4095 if (!image_state) continue; // Can't set layouts of a dead image
4096
4097 auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
4098 const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
4099 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
4100 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
4101 }
4102
4103 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
4104 cb_state->linkedCommandBuffers.insert(sub_cb_state);
4105 sub_cb_state->linkedCommandBuffers.insert(cb_state);
4106 for (auto &function : sub_cb_state->queryUpdates) {
4107 cb_state->queryUpdates.push_back(function);
4108 }
4109 for (auto &function : sub_cb_state->queue_submit_functions) {
4110 cb_state->queue_submit_functions.push_back(function);
4111 }
4112 }
4113}
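
// Illustrative sketch (not part of the layer): the propagation above runs when an application
// replays secondary command buffers into a primary one, e.g. with hypothetical handles:
//
//     VkCommandBuffer secondaries[2] = {sec_cb_a, sec_cb_b};
//     vkCmdExecuteCommands(primary_cb, 2, secondaries);
//
// For CoreChecks (which populates image_layout_map) each secondary's layout state is folded
// into the primary's map here, so later submits validate against the combined layout state.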

void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}

void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range = MemRange();
        mem_info->p_driver_data = nullptr;
    }
}
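
// Illustrative sketch (not part of the layer): the map/unmap pair tracked above follows the
// usual host-visible memory pattern, assuming `device`, `memory`, `src`, and `size` are valid:
//
//     void *data = nullptr;
//     vkMapMemory(device, memory, /*offset*/ 0, VK_WHOLE_SIZE, 0, &data);  // records mapped_range
//     memcpy(data, src, size);
//     vkUnmapMemory(device, memory);                                       // clears mapped_range above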

void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
            AddAliasingImage(image_state);
        }
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
                                                           VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkBindImageMemoryInfo bindInfo = {};
    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindInfo.image = image;
    bindInfo.memory = mem;
    bindInfo.memoryOffset = memoryOffset;
    UpdateBindImageMemoryState(bindInfo);
}

void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}
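
// Illustrative sketch (not part of the layer): vkBindImageMemory2 lets one call carry several
// bindings, each of which lands in UpdateBindImageMemoryState() above. Hypothetical handles:
//
//     VkBindImageMemoryInfo binds[2] = {};
//     binds[0].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
//     binds[0].image = image_a;  binds[0].memory = memory;  binds[0].memoryOffset = 0;
//     binds[1] = binds[0];
//     binds[1].image = image_b;  binds[1].memoryOffset = 65536;
//     vkBindImageMemory2(device, 2, binds);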

void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
    auto event_state = GetEventState(event);
    if (event_state) {
        event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
    }
}

void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
        semaphore_state->scope = kSyncScopeExternalPermanent;
    }
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif

void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}

void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
                                                    VkFenceImportFlagsKHR flags) {
    FENCE_STATE *fence_node = GetFenceState(fence);
    if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
            fence_node->scope == kSyncScopeInternal) {
            fence_node->scope = kSyncScopeExternalTemporary;
        } else {
            fence_node->scope = kSyncScopeExternalPermanent;
        }
    }
}
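
// Illustrative sketch (not part of the layer): the scope transitions above distinguish
// temporary from permanent imports. A temporary import (hypothetical fd) only overrides the
// fence payload until the next wait/reset:
//
//     VkImportFenceFdInfoKHR info = {VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR};
//     info.fence = fence;
//     info.flags = VK_FENCE_IMPORT_TEMPORARY_BIT_KHR;
//     info.handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;  // copy transference
//     info.fd = sync_fd;
//     vkImportFenceFdKHR(device, &info);  // scope -> kSyncScopeExternalTemporary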

void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
    FENCE_STATE *fence_state = GetFenceState(fence);
    if (fence_state) {
        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
            // Export with reference transference becomes external
            fence_state->scope = kSyncScopeExternalPermanent;
        } else if (fence_state->scope == kSyncScopeInternal) {
            // Export with copy transference has a side effect of resetting the fence
            fence_state->state = FENCE_UNSIGNALED;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}

void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
    if (VK_SUCCESS != result) return;
    eventMap[*pEvent].write_in_use = 0;
    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}

void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
}
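
// Illustrative sketch (not part of the layer): the retirement rule above applies on the
// recreate path, e.g. after a window resize (hypothetical handles):
//
//     VkSwapchainCreateInfoKHR ci = previous_ci;  // refreshed extent, etc.
//     ci.oldSwapchain = old_swapchain;
//     VkSwapchainKHR new_swapchain = VK_NULL_HANDLE;
//     VkResult res = vkCreateSwapchainKHR(device, &ci, nullptr, &new_swapchain);
//     // Whether or not res == VK_SUCCESS, old_swapchain is now retired and may only be destroyed.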

void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}

void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}

void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed -- but if the app does that,
        // it's confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP
    // (and its semaphore waits) /never/ participate in any completion proof.
}

void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
    if (pCreateInfos) {
        for (uint32_t i = 0; i < swapchainCount; i++) {
            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
        }
    }
}

void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}
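
// Illustrative sketch (not part of the layer): a typical frame pairs this acquire tracking
// with the present tracking above (hypothetical handles, error handling omitted):
//
//     uint32_t index = 0;
//     vkAcquireNextImageKHR(device, swapchain, UINT64_MAX, acquire_sem, VK_NULL_HANDLE, &index);
//     // ... submit work that waits on acquire_sem and signals render_sem ...
//     VkPresentInfoKHR present = {VK_STRUCTURE_TYPE_PRESENT_INFO_KHR};
//     present.waitSemaphoreCount = 1;  present.pWaitSemaphores = &render_sem;
//     present.swapchainCount = 1;      present.pSwapchains = &swapchain;
//     present.pImageIndices = &index;
//     vkQueuePresentKHR(queue, &present);  // image_state->acquired flips back to false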

void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
    if ((nullptr != pPhysicalDevices) && (result == VK_SUCCESS || result == VK_INCOMPLETE)) {
        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
            phys_device_state.phys_device = pPhysicalDevices[i];
            // Init actual features for each physical device
            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
        }
    }
}

// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}
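
// Illustrative sketch (not part of the layer): the UNCALLED -> QUERY_COUNT -> QUERY_DETAILS
// progression above mirrors the standard two-call enumeration pattern:
//
//     uint32_t count = 0;
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);       // QUERY_COUNT
//     std::vector<VkQueueFamilyProperties> props(count);
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, props.data());  // QUERY_DETAILS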

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!surface) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->destroyed = true;
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_METAL_EXT
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_METAL_EXT

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
                                                                    const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
                                                                    const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                    VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    physical_device_state->features2.pNext = nullptr;
    physical_device_state->features2.features = *pFeatures;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
    if (*pSurfaceFormatCount) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
        }
        if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
            physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (nullptr != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
                                                                                              uint32_t queueFamilyIndex,
                                                                                              uint32_t *pCounterCount,
                                                                                              VkPerformanceCounterKHR *pCounters) {
    if (nullptr == pCounters) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);

    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
    queueFamilyCounters->counters.resize(*pCounterCount);
    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];

    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}

void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}

void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}
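
// Illustrative sketch (not part of the layer): a descriptor update template maps raw bytes to
// descriptor writes, which the decode step above reconstructs. Hypothetical setup:
//
//     VkDescriptorUpdateTemplateEntry entry = {};
//     entry.dstBinding = 0;  entry.descriptorCount = 1;
//     entry.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//     entry.offset = 0;      entry.stride = sizeof(VkDescriptorBufferInfo);
//     // ... create the template with this entry, then:
//     VkDescriptorBufferInfo buffer_info = {buffer, 0, VK_WHOLE_SIZE};
//     vkUpdateDescriptorSetWithTemplate(device, set, update_template, &buffer_info);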

void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
    }

    const VkFormat conversion_format = create_info->format;

    if (conversion_format != VK_FORMAT_UNDEFINED) {
        // If format is VK_FORMAT_UNDEFINED, it will be set by the external AHB features
        ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
    }

    ycbcr_state->chromaFilter = create_info->chromaFilter;
    ycbcr_state->format = conversion_format;
    samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
    }

    auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
    ycbcr_state->destroyed = true;
    samplerYcbcrConversionMap.erase(ycbcr_conversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}

void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_obj.perf_pass = passIndex;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}
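
// Illustrative sketch (not part of the layer): hostQueryReset (core in Vulkan 1.2, previously
// VK_EXT_host_query_reset) lets the host reset queries without recording a command buffer:
//
//     vkResetQueryPool(device, query_pool, /*firstQuery*/ 0, /*queryCount*/ 8);
//     // Each affected QueryObject transitions to QUERYSTATE_RESET in queryToStateMap above.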

void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, nullptr);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during the ValidateAllocateDescriptorSets() call
    }
}

// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
                                                                       variable_count, this);
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}
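
// Illustrative sketch (not part of the layer): the variable_count handling above corresponds
// to allocating with VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT, e.g.:
//
//     uint32_t counts[1] = {64};  // actual size of the variable-count binding for this set
//     VkDescriptorSetVariableDescriptorCountAllocateInfoEXT vc =
//         {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT};
//     vc.descriptorSetCount = 1;  vc.pDescriptorCounts = counts;
//     VkDescriptorSetAllocateInfo alloc = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO};
//     alloc.pNext = &vc;  alloc.descriptorPool = pool;
//     alloc.descriptorSetCount = 1;  alloc.pSetLayouts = &layout;
//     vkAllocateDescriptorSets(device, &alloc, &set);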
5101
5102// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
5103void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
5104 UpdateDrawState(cb_state, bind_point);
5105 cb_state->hasDispatchCmd = true;
5106}
5107
locke-lunargd556cc32019-09-17 01:21:23 -06005108// Generic function to handle state update for all CmdDraw* type functions
5109void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
5110 UpdateStateCmdDrawDispatchType(cb_state, bind_point);
locke-lunargd556cc32019-09-17 01:21:23 -06005111 cb_state->hasDrawCmd = true;
5112}
5113
5114void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5115 uint32_t firstVertex, uint32_t firstInstance) {
5116 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5117 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
5118}
5119
5120void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5121 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5122 uint32_t firstInstance) {
5123 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5124 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
5125}
5126
5127void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5128 uint32_t count, uint32_t stride) {
5129 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5130 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5131 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                        VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                        uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
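    // Both the indirect parameter buffer and the count buffer must stay alive until the command buffer completes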
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                      VkDeviceSize offset, VkBuffer countBuffer,
                                                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                      uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}

void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
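    // The module's code may not be SPIR-V (e.g. GLSL when VK_NV_glsl_shader is enabled), so check the SPIR-V magic number
    // before attempting to parse it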
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}

void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module || !module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline
    for (const auto &use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
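        // Merge this stage's requirement bits into the pipeline's per-(set, binding) requirements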
        auto &reqs = pipeline->active_slots[slot][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
    }
}

void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

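    // Push constant values may become undefined when a layout with different push constant ranges is bound, so drop the
    // cached data and re-size it to cover the new ranges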
    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}

void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);
    if (!swapchain_state) return;

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

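    // vkGetSwapchainImagesKHR follows the usual two-call idiom (first the count, then the handles); record how much the
    // application has queried so far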
    if (pSwapchainImages) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
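            // Reconstruct an equivalent VkImageCreateInfo from the swapchain create info so the swapchain image can be
            // tracked and validated like a normally created image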
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;  // to be set later
            image_ci.flags = 0;        // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

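            // Translate swapchain create flags into their image create flag equivalents (per the VK_KHR_swapchain spec)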
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);

            AddImageStateProps(*image_state, device, physical_device);
        }
    }

    if (*pSwapchainImageCount) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
        }
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}

void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureKHR(
    VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
    const VkAccelerationStructureBuildOffsetInfoKHR *const *ppOffsetInfos) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state == nullptr) {
        return;
    }
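    // For each build, mark the destination as built and bind both source and destination acceleration structures to the
    // command buffer so their lifetimes are validated against its execution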
    for (uint32_t i = 0; i < infoCount; ++i) {
        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfos[i].dstAccelerationStructure);
        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfos[i].srcAccelerationStructure);
        if (dst_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info_khr.initialize(&pInfos[i]);  // record this build's geometry info, not pInfos[0]
            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
        }
        if (src_as_state != nullptr) {
            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
        }
    }
    cb_state->hasBuildAccelerationStructureCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
                                                                           const VkCopyAccelerationStructureInfoKHR *pInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfo->src);
        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfo->dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info_khr = src_as_state->build_info_khr;
            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
        }
    }
}