/* Copyright (c) 2015-2020 The Khronos Group Inc.
 * Copyright (c) 2015-2020 Valve Corporation
 * Copyright (c) 2015-2020 LunarG, Inc.
 * Copyright (C) 2015-2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Shannon McPherson <shannon@lunarg.com>
 */

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

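// Records AHB-specific create-time state on the image: whether it is backed by an
// AndroidHardwareBuffer external memory handle and, for external formats, the format
// features cached by PostCallRecordGetAndroidHardwareBufferPropertiesANDROID below.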
void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->external_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            is_node->format_features = it->second;
        }
    }
}

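// Records the Android external format backing a sampler Y'CbCr conversion so that
// later calls can map the conversion handle back to its cached AHB format features.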
void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            ycbcr_state->format_features = it->second;
        }
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

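// Caches the externalFormat -> formatFeatures mapping reported by the driver so the
// create-time recorders above can resolve format features for AHB external formats.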
void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
    if (ahb_format_props) {
        ahb_ext_formats_map.insert({ahb_format_props->externalFormat, ahb_format_props->formatFeatures});
    }
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
                                                                                     uint32_t set) {
    std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
    if (layout_data && (set < layout_data->set_layouts.size())) {
        dsl = layout_data->set_layouts[set];
    }
    return dsl;
}
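
// Minimal usage sketch (hypothetical caller; the variable names are illustrative):
//   const auto dsl = GetDslFromPipelineLayout(pipeline_layout_state, set_index);
//   if (dsl) {
//       // A layout exists for this set index; safe to dereference dsl
//   }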
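// Queries and caches the image's supported format features on the state object.
// DRM-format-modifier images take the modifier-specific feature path; everything
// else uses the linear/optimal tiling features from VkFormatProperties.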
void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
    // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
    // If the format is an AHB external format, the features were already set at image-create time
    if (image_state.has_ahb_format == false) {
        const VkImageTiling image_tiling = image_state.createInfo.tiling;
        const VkFormat image_format = image_state.createInfo.format;
        if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
            VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
                VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
            DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image, &drm_format_properties);

            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            // First call gets the modifier count, second call fills the allocated array
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);

            // DRM format modifiers are opaque identifiers, not bitmasks, so match by equality
            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
                    drm_format_properties.drmFormatModifier) {
                    image_state.format_features |=
                        drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                }
            }
        } else {
            VkFormatProperties format_properties;
            DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
            image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                   : format_properties.optimalTilingFeatures;
        }
    }
}

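// For reference, an image that takes the disjoint multi-planar path in
// PostCallRecordCreateImage below would be created by the application roughly like
// this (illustrative sketch only, not code from this layer):
//   VkImageCreateInfo ci = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO};
//   ci.flags = VK_IMAGE_CREATE_DISJOINT_BIT;
//   ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;  // FormatPlaneCount() == 3
//   // ... remaining members filled in ...
//   vkCreateImage(device, &ci, nullptr, &image);  // each plane then has its own requirements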
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(*pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes every format has at least plane 0
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    AddImageStateProps(*is_node, device, physical_device);

    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image, mem_binding);
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create a deep copy of pQueueFamilyIndices, so that needs fixing if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    // Query the buffer's memory requirements in case the app never does
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

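// Builds IMAGE_VIEW_STATE and resolves the view's format features: views of AHB
// external-format images inherit the image's features, DRM-format-modifier images use
// the features of the image's specific modifier, and all other images use the
// linear/optimal tiling features for the view's format.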
void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);

    // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
    const VkImageTiling image_tiling = image_state->createInfo.tiling;
    const VkFormat image_view_format = pCreateInfo->format;
    if (image_state->has_ahb_format == true) {
        // The ImageView uses the same format features as its image since they share the same AHB
        image_view_state->format_features = image_state->format_features;
    } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
        assert(device_extensions.vk_ext_image_drm_format_modifier);
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                       nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;
        // First call gets the modifier count, second call fills the allocated array
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
        std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties(drm_properties_list.drmFormatModifierCount);
        drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        // DRM format modifiers are opaque identifiers, not bitmasks, so match by equality
        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
                image_view_state->format_features |=
                    drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
            }
        }
    } else {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
        image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                     : format_properties.optimalTilingFeatures;
    }

    imageViewMap.insert(std::make_pair(*pView, std::move(image_view_state)));
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        RemoveBufferMemoryRange(buffer, mem_binding);
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(CMD_BUFFER_STATE *cb, FRAMEBUFFER_STATE *framebuffer,
                                                                      uint32_t index) {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
        assert(index < cb->imagelessFramebufferAttachments.size());
        return cb->imagelessFramebufferAttachments[index];
    }
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const CMD_BUFFER_STATE *cb,
                                                                            const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
        assert(index < cb->imagelessFramebufferAttachments.size());
        return cb->imagelessFramebufferAttachments[index];
    }
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

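// Cross-links image_state with every other image bound to the same memory object (or
// the same swapchain image slot) whose creation parameters make the two images legal
// aliases of one another; RemoveAliasingImage below undoes the links on destruction.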
void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        if (image_state->binding.mem_state) {
            bound_images = &image_state->binding.mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // This is a one-way clear: because bound_images already contains the cross references,
    // clearing each image's aliasing set in this single loop removes every reference, so no
    // second pass is needed.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}

const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

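// Creates DEVICE_MEMORY_STATE for a new allocation and records any dedicated-allocation
// (VkMemoryDedicatedAllocateInfo) and export (VkExportMemoryAllocateInfo) information
// found in the allocate-info pNext chain.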
void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(mem_binding->cb_bindings,
                                        VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            mem_binding->binding.size = mem_binding->requirements.size;
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}

// For the NULL-memory case, clear any previous binding. Otherwise:
//  - Make sure the given object is in its object map
//  - If a previous binding existed, update the binding
//  - Add a reference from the objectInfo to the memoryInfo
//  - Add a reference off of the object's binding info
// Return value is a 'skip' flag (currently always false)
bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
                                                 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
    bool skip = false;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
            if (binding.mem_state) {
                binding.mem_state->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}

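// Rebinds the descriptor sets and resources required by the pipeline active at
// bind_point to cb_state. Sets whose contents, change count, and binding-requirement
// map are unchanged since the last validation are skipped (see need_update below).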
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            if (!descriptor_set->IsPushDescriptor()) {
                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
                // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

                if (reduced_map.IsManyDescriptors()) {
                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
                }

                // We can skip updating the state if "nothing" has changed since the last validation.
                // See CoreChecks::ValidateCmdBufDrawState for more details.
                bool descriptor_set_changed =
                    !reduced_map.IsManyDescriptors() ||
                    // Update if descriptor set (or contents) has changed
                    state.per_set[setIndex].validated_set != descriptor_set ||
                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                    (!disabled.image_layout_validation &&
                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
                bool need_update = descriptor_set_changed ||
                                   // Update if previous bindingReqMap doesn't include new bindingReqMap
                                   !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                                  state.per_set[setIndex].validated_set_binding_req_map.end(),
                                                  binding_req_map.begin(), binding_req_map.end());

                if (need_update) {
                    // Bind this set and its active descriptor resources to the command buffer
                    if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                        // Only record the bindings that haven't already been recorded
                        BindingReqMap delta_reqs;
                        std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                            state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                            state.per_set[setIndex].validated_set_binding_req_map.end(),
                                            std::inserter(delta_reqs, delta_reqs.begin()));
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, delta_reqs);
                    } else {
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, binding_req_map);
                    }

                    state.per_set[setIndex].validated_set = descriptor_set;
                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                    if (reduced_map.IsManyDescriptors()) {
                        // Check whether old == new before assigning, the equality check is much cheaper than
                        // freeing and reallocating the map.
                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                        }
                    } else {
                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                    }
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

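// Returns the union of every format feature the given format could expose: linear and
// optimal tiling features, plus (when VK_EXT_image_drm_format_modifier is enabled) the
// tiling features of all of the format's DRM format modifiers.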
VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
    VkFormatFeatureFlags format_features = 0;

    if (format != VK_FORMAT_UNDEFINED) {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
        format_features |= format_properties.linearTilingFeatures;
        format_features |= format_properties.optimalTilingFeatures;
        if (device_extensions.vk_ext_image_drm_format_modifier) {
            // VK_KHR_get_physical_device_properties2 is required in this case
            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            // First call gets the modifier count, second call fills the allocated array
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
            }
        }
    }

    return format_features;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
// Add object_binding to cmd buffer
// Add cb_binding to object
bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return false;
    }
    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
    auto inserted = cb_bindings.insert({cb_node, -1});
    if (inserted.second) {
        cb_node->object_bindings.push_back(obj);
        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
        return true;
    }
    return false;
}

// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        pCB->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        pCB->small_indexed_draw_call_count = 0;
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

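// Captures device-creation state: the enabled feature set (from pEnabledFeatures or
// VkPhysicalDeviceFeatures2, plus VkPhysicalDeviceVulkan12Features or its
// pre-promotion extension feature structs), and this device's queue family properties.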
1093void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
1094 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
1095 VkResult result) {
1096 if (VK_SUCCESS != result) return;
1097
1098 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
1099 if (nullptr == enabled_features_found) {
1100 const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
1101 if (features2) {
1102 enabled_features_found = &(features2->features);
1103 }
1104 }

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously queried them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save a local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
    if (vulkan_12_features) {
        state_tracker->enabled_features.core12 = *vulkan_12_features;
    } else {
        // Set Extension Feature Aliases to false as there is no struct to check
        state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
        state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
        state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
        state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
        state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;

        // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features

        const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
        if (eight_bit_storage_features) {
            state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
            state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
                eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
            state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
        }

        const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
        if (float16_int8_features) {
            state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
            state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
        }

        const auto *descriptor_indexing_features =
            lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
        if (descriptor_indexing_features) {
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
                descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
            state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
                descriptor_indexing_features->descriptorBindingPartiallyBound;
            state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
                descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
            state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
        }

        const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
        if (scalar_block_layout_features) {
            state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
        }

        const auto *imageless_framebuffer_features =
            lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
        if (imageless_framebuffer_features) {
            state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
        }

        const auto *uniform_buffer_standard_layout_features =
            lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
        if (uniform_buffer_standard_layout_features) {
            state_tracker->enabled_features.core12.uniformBufferStandardLayout =
                uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
        }

        const auto *subgroup_extended_types_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
        if (subgroup_extended_types_features) {
            state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
                subgroup_extended_types_features->shaderSubgroupExtendedTypes;
        }

        const auto *separate_depth_stencil_layouts_features =
            lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
        if (separate_depth_stencil_layouts_features) {
            state_tracker->enabled_features.core12.separateDepthStencilLayouts =
                separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
        }

        const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
        if (host_query_reset_features) {
            state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
        }

        const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
        if (timeline_semaphore_features) {
            state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
        }

        const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
        if (buffer_device_address) {
            state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
            state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
                buffer_device_address->bufferDeviceAddressCaptureReplay;
            state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
                buffer_device_address->bufferDeviceAddressMultiDevice;
        }
    }

    const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
    if (vulkan_11_features) {
        state_tracker->enabled_features.core11 = *vulkan_11_features;
    } else {
        // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan11Features

        const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
        if (sixteen_bit_storage_features) {
            state_tracker->enabled_features.core11.storageBuffer16BitAccess =
                sixteen_bit_storage_features->storageBuffer16BitAccess;
            state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
                sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
            state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
            state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
        }

        const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
        if (multiview_features) {
            state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
            state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
            state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
        }

        const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
        if (variable_pointers_features) {
            state_tracker->enabled_features.core11.variablePointersStorageBuffer =
                variable_pointers_features->variablePointersStorageBuffer;
            state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
        }

        const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
        if (protected_memory_features) {
            state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
        }

        const auto *ycbcr_conversion_features =
            lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
        if (ycbcr_conversion_features) {
            state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
        }

        const auto *shader_draw_parameters_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
        if (shader_draw_parameters_features) {
            state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
        }
    }
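
    // Both promoted-feature blocks above normalize state: whether the app enabled
    // functionality via VkPhysicalDeviceVulkan11Features/VkPhysicalDeviceVulkan12Features
    // or via the individual pre-promotion extension structs, later checks can read a
    // single aggregate, e.g. state_tracker->enabled_features.core12.shaderFloat16.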

    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_device_address_ext) {
        state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    const auto *dedicated_allocation_image_aliasing_features =
        lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
    if (dedicated_allocation_image_aliasing_features) {
        state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
            *dedicated_allocation_image_aliasing_features;
    }

    const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
    if (performance_query_features) {
        state_tracker->enabled_features.performance_query_features = *performance_query_features;
    }

    const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
    if (device_coherent_memory_features) {
        state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
    }

    const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
    if (ycbcr_image_array_features) {
        state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
    }

    const auto *ray_tracing_features = lvl_find_in_chain<VkPhysicalDeviceRayTracingFeaturesKHR>(pCreateInfo->pNext);
    if (ray_tracing_features) {
        state_tracker->enabled_features.ray_tracing_features = *ray_tracing_features;
    }

    const auto *robustness2_features = lvl_find_in_chain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
    if (robustness2_features) {
        state_tracker->enabled_features.robustness2_features = *robustness2_features;
    }

    // Store physical device properties and physical device mem limits into CoreChecks structs
    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core11);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core12);

    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
        VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
        state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
            descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
        state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
            descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
        state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
        state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
            descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
    }

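    // As with features, properties are normalized: on pre-1.2 devices the promoted
    // limits above are back-filled from the EXT/KHR property structs, so consumers
    // can read phys_dev_props_core12 without caring how the values were obtained.
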
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
        VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
        state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
        state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
            depth_stencil_resolve_props.supportedStencilResolveModes;
        state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
        state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing, &phys_dev_props->ray_tracing_propsKHR);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
        VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
        state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
            timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
    }

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
        VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
        state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
        state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
    }

    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
                                                                               state_tracker->cooperative_matrix_properties.data());
    }
    if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
        state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
        state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
        state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
    }

    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
            state_tracker->queue_family_index_map.insert(
                std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.queueCount));
            state_tracker->queue_family_create_flags_map.insert(
                std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.flags));
        }
    }
}
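
// Illustrative sketch (not part of this layer): queue_family_index_map records
// queueFamilyIndex -> queueCount from the create info, so that a later
// vkGetDeviceQueue(device, family, index, &queue) can be checked against what the
// app actually requested. A hypothetical setup it would capture:
//
//     VkDeviceQueueCreateInfo qci = {VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO};
//     qci.queueFamilyIndex = 0;
//     qci.queueCount = 2;        // recorded as queue_family_index_map[0] = 2
//     float priorities[2] = {1.0f, 1.0f};
//     qci.pQueuePriorities = priorities;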

void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}

// Loop through bound objects and increment their in_use counts.
void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
    for (auto obj : cb_node->object_bindings) {
        auto base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_add(1);
        }
    }
}

// Track which resources are in-flight by atomically incrementing their "in_use" count
void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
    cb_node->submitCount++;
    cb_node->in_use.fetch_add(1);

    // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
    IncrementBoundObjects(cb_node);
    // TODO : We should be able to remove the NULL look-up checks from the code below, as long as all the
    // corresponding cases are verified to set CB_INVALID state, and that state is flagged prior to
    // calling this function
    for (auto event : cb_node->writeEventsBeforeWait) {
        auto event_state = GetEventState(event);
        if (event_state) event_state->write_in_use++;
    }
}

// Decrement in-use count for objects bound to command buffer
void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
    BASE_NODE *base_obj = nullptr;
    for (auto obj : cb_node->object_bindings) {
        base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_sub(1);
        }
    }
}

void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        for (auto &signal : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(signal.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
                if (pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && pSemaphore->payload < signal.payload) {
                    pSemaphore->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            QueryMap localQueryToStateMap;
            VkQueryPool first_pool = VK_NULL_HANDLE;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &localQueryToStateMap);
            }

            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
}
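
// A small worked example of the sequence-number model used above, assuming two
// queues: Q1 has pending submissions at seq 1..3, and Q2's seq-2 submission
// recorded a wait on (Q1, seq 2). Retiring Q2 through seq 2 collects {Q1: 2} in
// otherQueueSeqs, and the recursive RetireWorkOnQueue call then rolls Q1
// forward through seq 2 as well, since that work must have completed for the
// wait to have been satisfied.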

// Submit a fence to a queue, delimiting previous fences and previous untracked
// work by it.
static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
    pFence->state = FENCE_INFLIGHT;
    pFence->signaler.first = pQueue->queue;
    pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
}
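
// Example of the signaler arithmetic: if the queue has retired up to seq 5
// (pQueue->seq == 5) with 2 submissions still pending and this call covers 3
// more, the fence is considered signaled once the queue reaches seq
// 5 + 2 + 3 = 10, which is exactly the value stored in signaler.second.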

void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        vector<SEMAPHORE_WAIT> semaphore_waits;
        vector<SEMAPHORE_SIGNAL> semaphore_signals;
        vector<VkSemaphore> semaphore_externals;
        const uint64_t next_seq = pQueue->seq + pQueue->submissions.size() + 1;
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_WAIT wait;
                    wait.semaphore = semaphore;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                            wait.queue = pSemaphore->signaler.first;
                            wait.seq = pSemaphore->signaler.second;
                            semaphore_waits.push_back(wait);
                            pSemaphore->in_use.fetch_add(1);
                        }
                        pSemaphore->signaler.first = VK_NULL_HANDLE;
                        pSemaphore->signaled = false;
                    } else if (pSemaphore->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
                        wait.queue = queue;
                        wait.seq = next_seq;
                        wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
                        semaphore_waits.push_back(wait);
                        pSemaphore->in_use.fetch_add(1);
                    }
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = next_seq;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = next_seq;
                        pSemaphore->signaled = true;
                    } else {
                        signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, next_seq);
                }
            }
        }
        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                VkQueryPool first_pool = VK_NULL_HANDLE;
                EventToStageMap localEventToStageMap;
                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, first_pool, perf_pass, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, perf_pass);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
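
// Illustrative sketch (not part of this layer): the timeline-semaphore branch above
// reads its payloads from the submit's pNext chain, which an app supplies roughly
// like this (variable names hypothetical):
//
//     uint64_t wait_value = 1, signal_value = 2;
//     VkTimelineSemaphoreSubmitInfoKHR timeline_info = {VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR};
//     timeline_info.waitSemaphoreValueCount = 1;
//     timeline_info.pWaitSemaphoreValues = &wait_value;
//     timeline_info.signalSemaphoreValueCount = 1;
//     timeline_info.pSignalSemaphoreValues = &signal_value;
//     VkSubmitInfo submit = {VK_STRUCTURE_TYPE_SUBMIT_INFO, &timeline_info};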

void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS == result) {
        AddMemObjInfo(device, *pMemory, pAllocateInfo);
    }
    return;
}

void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    memObjMap.erase(mem);
}

void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);

                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = pSemaphore->signaler.second;
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
    semaphore_state->signaler.first = VK_NULL_HANDLE;
    semaphore_state->signaler.second = 0;
    semaphore_state->signaled = false;
    semaphore_state->scope = kSyncScopeInternal;
    semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
    semaphore_state->payload = 0;
    auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
    if (semaphore_type_create_info) {
        semaphore_state->type = semaphore_type_create_info->semaphoreType;
        semaphore_state->payload = semaphore_type_create_info->initialValue;
    }
    semaphoreMap[*pSemaphore] = std::move(semaphore_state);
}
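
// Illustrative sketch (not part of this layer): the binary-type/zero-payload defaults
// above are overridden when an app chains VkSemaphoreTypeCreateInfoKHR, e.g.
//
//     VkSemaphoreTypeCreateInfoKHR type_info = {VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR};
//     type_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
//     type_info.initialValue = 0;
//     VkSemaphoreCreateInfo ci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &type_info};
//     vkCreateSemaphore(device, &ci, nullptr, &semaphore);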

void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
                                                        VkSemaphoreImportFlagsKHR flags) {
    SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
    if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
            sema_node->scope == kSyncScopeInternal) {
            sema_node->scope = kSyncScopeExternalTemporary;
        } else {
            sema_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
                                                              VkResult result) {
    if (VK_SUCCESS != result) return;
    auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
    // Guard against a missing state entry before updating the payload
    if (pSemaphore) {
        pSemaphore->payload = pSignalInfo->value;
    }
}

void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range.offset = offset;
        mem_info->mapped_range.size = size;
        mem_info->p_driver_data = *ppData;
    }
}
2057
2058void ValidationStateTracker::RetireFence(VkFence fence) {
2059 auto pFence = GetFenceState(fence);
2060 if (pFence && pFence->scope == kSyncScopeInternal) {
2061 if (pFence->signaler.first != VK_NULL_HANDLE) {
2062 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002063 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002064 } else {
2065 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
2066 // the fence as retired.
2067 pFence->state = FENCE_RETIRED;
2068 }
2069 }
2070}
2071
2072void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2073 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2074 if (VK_SUCCESS != result) return;
2075
2076 // When we know that all fences are complete we can clean/remove their CBs
2077 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2078 for (uint32_t i = 0; i < fenceCount; i++) {
2079 RetireFence(pFences[i]);
2080 }
2081 }
    // NOTE: The alternate case, where only some of the fences have completed, is not handled here. For the app to
    // know which fences completed, it must call vkGetFenceStatus(), at which point we'll clean/remove their CBs
    // if complete.
}

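// Retire timeline-semaphore work: for each queue, find the latest submission (highest seq) that signals this
// semaphore with a payload <= until_payload, then retire all of that queue's work up to and including it.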
void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore) {
        for (auto &pair : queueMap) {
            QUEUE_STATE &queueState = pair.second;
            uint64_t max_seq = 0;
            for (const auto &submission : queueState.submissions) {
                for (const auto &signalSemaphore : submission.signalSemaphores) {
                    if (signalSemaphore.semaphore == semaphore && signalSemaphore.payload <= until_payload) {
                        if (signalSemaphore.seq > max_seq) {
                            max_seq = signalSemaphore.seq;
                        }
                    }
                }
            }
            if (max_seq) {
                RetireWorkOnQueue(&queueState, max_seq);
            }
        }
    }
}

void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                  VkResult result) {
    if (VK_SUCCESS != result) return;

    for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
        RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
    }
}

void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                          VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}

void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
                                                             uint64_t timeout, VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}

void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
    if (VK_SUCCESS != result) return;
    RetireFence(fence);
}

void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
    // Add queue to tracking set only if it is new
    auto queue_is_new = queues.emplace(queue);
    if (queue_is_new.second) {
        QUEUE_STATE *queue_state = &queueMap[queue];
        queue_state->queue = queue;
        queue_state->queueFamilyIndex = queue_family_index;
        queue_state->seq = 0;
    }
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
    if (VK_SUCCESS != result) return;
    QUEUE_STATE *queue_state = GetQueueState(queue);
    RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
}

void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (auto &queue : queueMap) {
        RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
    }
}

void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    if (!fence) return;
    auto fence_state = GetFenceState(fence);
    fence_state->destroyed = true;
    fenceMap.erase(fence);
}

void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!semaphore) return;
    auto semaphore_state = GetSemaphoreState(semaphore);
    semaphore_state->destroyed = true;
    semaphoreMap.erase(semaphore);
}

void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    if (!event) return;
    EVENT_STATE *event_state = GetEventState(event);
    const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
    InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
    eventMap.erase(event);
}

void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!queryPool) return;
    QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
    const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
    InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
    qp_state->destroyed = true;
    queryPoolMap.erase(queryPool);
}

// Object with given handle is being bound to memory with the given mem_info struct at the given memoryOffset.
// Track the binding by inserting the handle into the memory object's set of bound images, buffers, or
// acceleration structures, as appropriate for the handle type.
void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
                                               VkDeviceSize memoryOffset) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
}

void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
}

void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
                                                                    VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
}

// Remove the handle from the appropriate bound-object set on the memory object.
static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
}

void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
}

void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
}

void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        // Track bound memory range information
        auto mem_info = GetDevMemState(mem);
        if (mem_info) {
            InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
        }
        // Track objects tied to memory
        SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
                                                             const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                                const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        buffer_state->memory_requirements_checked = true;
    }
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                        VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                           VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}

void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
    const VkImagePlaneMemoryRequirementsInfo *plane_info =
        (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
    IMAGE_STATE *image_state = GetImageState(image);
    if (image_state) {
        if (plane_info != nullptr) {
            // Multi-plane image
            image_state->memory_requirements_checked = false;  // Each image plane needs to be checked itself
            if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
                image_state->plane0_memory_requirements_checked = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
                image_state->plane1_memory_requirements_checked = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
                image_state->plane2_memory_requirements_checked = true;
            }
        } else {
            // Single plane image
            image_state->memory_requirements_checked = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(image, nullptr);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}

static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
                                                        VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
    image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
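    // An image whose format properties include the metadata aspect has sparse metadata (mip tail) that must be
    // bound before use; remember that here so the binding can be validated later.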
    if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
        image_state->sparse_metadata_required = true;
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
    VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    auto image_state = GetImageState(image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!shaderModule) return;
    auto shader_module_state = GetShaderModuleState(shaderModule);
    shader_module_state->destroyed = true;
    shaderModuleMap.erase(shaderModule);
}

void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
                                                          const VkAllocationCallbacks *pAllocator) {
    if (!pipeline) return;
    PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
    const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
    pipeline_state->destroyed = true;
    pipelineMap.erase(pipeline);
}

void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!pipelineLayout) return;
    auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
    pipeline_layout_state->destroyed = true;
    pipelineLayoutMap.erase(pipelineLayout);
}

void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
                                                         const VkAllocationCallbacks *pAllocator) {
    if (!sampler) return;
    SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
    const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
    // Any bound cmd buffers are now invalid
    if (sampler_state) {
        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
        sampler_state->destroyed = true;
    }
    samplerMap.erase(sampler);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
                                                                     const VkAllocationCallbacks *pAllocator) {
    if (!descriptorSetLayout) return;
    auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
    if (layout_it != descriptorSetLayoutMap.end()) {
        layout_it->second.get()->destroyed = true;
        descriptorSetLayoutMap.erase(layout_it);
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!descriptorPool) return;
    DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
    const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
    if (desc_pool_state) {
        // Any bound cmd buffers are now invalid
        InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
        // Free sets that were in this pool
        for (auto ds : desc_pool_state->sets) {
            FreeDescriptorSet(ds);
        }
        desc_pool_state->destroyed = true;
        descriptorPoolMap.erase(descriptorPool);
    }
}

// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}

void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
                                                             uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
    auto pPool = GetCommandPoolState(commandPool);
    FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
}

void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
    cmd_pool_state->createFlags = pCreateInfo->flags;
    cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
    commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
}

void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
    query_pool_state->createInfo = *pCreateInfo;
    query_pool_state->pool = *pQueryPool;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
2507 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
2508
2509 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2510 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2511 switch (counter.scope) {
2512 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
2513 query_pool_state->has_perf_scope_command_buffer = true;
2514 break;
2515 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
2516 query_pool_state->has_perf_scope_render_pass = true;
2517 break;
2518 default:
2519 break;
2520 }
2521 }
2522
2523 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
2524 &query_pool_state->n_performance_passes);
2525 }
2526
locke-lunargd556cc32019-09-17 01:21:23 -06002527 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
2528
2529 QueryObject query_obj{*pQueryPool, 0u};
2530 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2531 query_obj.query = i;
2532 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2533 }
2534}
2535
2536void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2537 const VkAllocationCallbacks *pAllocator) {
2538 if (!commandPool) return;
2539 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2540 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2541 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2542 if (cp_state) {
2543 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2544 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2545 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002546 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002547 commandPoolMap.erase(commandPool);
2548 }
2549}
2550
2551void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2552 VkCommandPoolResetFlags flags, VkResult result) {
2553 if (VK_SUCCESS != result) return;
2554 // Reset all of the CBs allocated from this pool
2555 auto command_pool_state = GetCommandPoolState(commandPool);
2556 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2557 ResetCommandBufferState(cmdBuffer);
2558 }
2559}
2560
2561void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2562 VkResult result) {
2563 for (uint32_t i = 0; i < fenceCount; ++i) {
2564 auto pFence = GetFenceState(pFences[i]);
2565 if (pFence) {
2566 if (pFence->scope == kSyncScopeInternal) {
2567 pFence->state = FENCE_UNSIGNALED;
2568 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2569 pFence->scope = kSyncScopeInternal;
2570 }
2571 }
2572 }
2573}
2574
Jeff Bolzadbfa852019-10-04 13:53:30 -05002575// For given cb_nodes, invalidate them and track object causing invalidation.
2576// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
2577// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
2578// can also unlink objects from command buffers.
2579void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
2580 const VulkanTypedHandle &obj, bool unlink) {
2581 for (const auto &cb_node_pair : cb_nodes) {
2582 auto &cb_node = cb_node_pair.first;
2583 if (cb_node->state == CB_RECORDING) {
2584 cb_node->state = CB_INVALID_INCOMPLETE;
2585 } else if (cb_node->state == CB_RECORDED) {
2586 cb_node->state = CB_INVALID_COMPLETE;
2587 }
2588 cb_node->broken_bindings.push_back(obj);
2589
2590 // if secondary, then propagate the invalidation to the primaries that will call us.
2591 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
2592 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
2593 }
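        // The map value is this object's index into the command buffer's object_bindings vector; clear that
        // slot so the (soon to be destroyed) object is no longer referenced by the command buffer.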
        if (unlink) {
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        cb_nodes.clear();
    }
}

void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
                                                            const VulkanTypedHandle &obj) {
    for (auto cb_node : cb_nodes) {
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // if secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!framebuffer) return;
    FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
    const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
    InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
    framebuffer_state->destroyed = true;
    frameBufferMap.erase(framebuffer);
}

void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!renderPass) return;
    RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
    const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
    InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
    rp_state->destroyed = true;
    renderPassMap.erase(renderPass);
}

void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto fence_state = std::make_shared<FENCE_STATE>();
    fence_state->fence = *pFence;
    fence_state->createInfo = *pCreateInfo;
    fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
    fenceMap[*pFence] = std::move(fence_state);
}

bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                    const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                    void *cgpl_state_data) const {
    // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    cgpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    cgpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
        (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   VkResult result, void *cgpl_state_data) {
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
        }
    }
    cgpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkComputePipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   void *ccpl_state_data) const {
    auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
    ccpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    ccpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
        ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                  const VkComputePipelineCreateInfo *pCreateInfos,
                                                                  const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                  VkResult result, void *ccpl_state_data) {
    create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);

    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
        }
    }
    ccpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
                                                                        uint32_t count,
                                                                        const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache,
                                                                         uint32_t count,
                                                                         const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
                                                         const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
                                                         VkResult result) {
    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
                                                                     const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkDescriptorSetLayout *pSetLayout, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
}

// For repeatable sorting, not very useful for "memory in range" search
struct PushConstantRangeCompare {
    bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
        if (lhs->offset == rhs->offset) {
            if (lhs->size == rhs->size) {
                // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
                return lhs->stageFlags < rhs->stageFlags;
            }
            // If the offsets are the same then sorting by the end of range is useful for validation
            return lhs->size < rhs->size;
        }
        return lhs->offset < rhs->offset;
    }
};

static PushConstantRangesDict push_constant_ranges_dict;

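// Produce a canonical id for a pipeline layout's push constant ranges, so that layouts whose ranges are
// equivalent (in any order) map to the same id and can be compared by id alone.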
PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
    if (!info->pPushConstantRanges) {
        // Hand back the empty entry (creating as needed)...
        return push_constant_ranges_dict.look_up(PushConstantRanges());
    }

    // Sort the input ranges to ensure equivalent ranges map to the same id
    std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
    for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
        sorted.insert(info->pPushConstantRanges + i);
    }

    PushConstantRanges ranges;
    ranges.reserve(sorted.size());
    for (const auto range : sorted) {
        ranges.emplace_back(*range);
    }
    return push_constant_ranges_dict.look_up(std::move(ranges));
}

// Dictionary of canonical form of the pipeline layout's list of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;

static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}

void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkPipelineLayout *pPipelineLayout, VkResult result) {
    if (VK_SUCCESS != result) return;

    auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
    pipeline_layout_state->layout = *pPipelineLayout;
    pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
    PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
        set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
    }

    // Get canonical form IDs for the "compatible for set" contents
    pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
    auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
    pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);

    // Create table of "compatible for set N" canonical forms for trivial accept validation
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->compat_for_set.emplace_back(
            GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
    }
    pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkDescriptorPool *pDescriptorPool, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                               VkDescriptorPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
    // TODO: validate flags
    // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
    for (auto ds : pPool->sets) {
        FreeDescriptorSet(ds);
    }
    pPool->sets.clear();
    // Reset available count for each type and available sets for this pool
    for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
        pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
    }
    pPool->availableSets = pPool->maxSets;
}

bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
                                                                   const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                   VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
    // Always update common data
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);

    return false;
}

// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                  VkDescriptorSet *pDescriptorSets, VkResult result,
                                                                  void *ads_state_data) {
    if (VK_SUCCESS != result) return;
    // All the updates are contained in a single cvdescriptorset function
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
}

void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
                                                             const VkDescriptorSet *pDescriptorSets) {
    DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
    // Update available descriptor sets in pool
    pool_state->availableSets += count;

    // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
    for (uint32_t i = 0; i < count; ++i) {
        if (pDescriptorSets[i] != VK_NULL_HANDLE) {
            auto descriptor_set = setMap[pDescriptorSets[i]].get();
            uint32_t type_index = 0, descriptor_count = 0;
            for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
                type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
                descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
                pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
            }
            FreeDescriptorSet(descriptor_set);
            pool_state->sets.erase(descriptor_set);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}

void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
                                                                  VkCommandBuffer *pCommandBuffer, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
    if (pPool) {
        for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
            // Add command buffer to its commandPool map
            pPool->commandBuffers.insert(pCommandBuffer[i]);
            auto pCB = std::make_shared<CMD_BUFFER_STATE>();
            pCB->createInfo = *pCreateInfo;
            pCB->device = device;
            pCB->command_pool = pPool;
            // Add command buffer to map
            commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
            ResetCommandBufferState(pCommandBuffer[i]);
        }
    }
}

// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
    AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
                            cb_state);
    // If imageless fb, skip fb binding
    if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
    const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
    for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
        auto view_state = GetAttachmentImageViewState(cb_state, fb_state, attachment);
        if (view_state) {
            AddCommandBufferBindingImageView(cb_state, view_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary Command Buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (pInfo) {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, framebuffer);
                }
            }
        }
    }
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo) {
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If this is a secondary command buffer that is inheriting, update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
            cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
            cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
        }
    }

    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
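        // No device group info supplied: default to a mask that includes every physical device in the group.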
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    cb_state->performance_lock_acquired = performance_lock_acquired;
}

void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    // Cached validation is specific to a specific recording of a specific command buffer.
    for (auto descriptor_set : cb_state->validated_descriptor_sets) {
        descriptor_set->ClearCachedValidation(cb_state);
    }
    cb_state->validated_descriptor_sets.clear();
    if (VK_SUCCESS == result) {
        cb_state->state = CB_RECORDED;
    }
}

void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
                                                              VkResult result) {
    if (VK_SUCCESS == result) {
        ResetCommandBufferState(commandBuffer);
    }
}

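// Compute the command buffer's static-state mask for a pipeline: start from "all state is static" and clear
// the bit for each state the pipeline declares dynamic, since dynamic state must be supplied by vkCmdSet* calls.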
CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
    // initially assume everything is static state
    CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;

    if (ds) {
        for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
            switch (ds->pDynamicStates[i]) {
                case VK_DYNAMIC_STATE_LINE_WIDTH:
                    flags &= ~CBSTATUS_LINE_WIDTH_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BIAS:
                    flags &= ~CBSTATUS_DEPTH_BIAS_SET;
                    break;
                case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
                    flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
                    flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
                    flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
                    flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
                    flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
                    break;
                case VK_DYNAMIC_STATE_SCISSOR:
                    flags &= ~CBSTATUS_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT:
                    flags &= ~CBSTATUS_VIEWPORT_SET;
                    break;
                case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
                    flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
                    flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
                    break;
                case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
                    flags &= ~CBSTATUS_LINE_STIPPLE_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
                    flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
                    break;
                default:
                    break;
            }
        }
    }

    return flags;
}

// Validation cache:
// CV is the bottommost implementor of this extension. Don't pass calls down.
// utility function to set collective state for pipeline
void SetPipelineState(PIPELINE_STATE *pPipe) {
    // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
    if (pPipe->graphicsPipelineCI.pColorBlendState) {
        for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
            if (VK_TRUE == pPipe->attachments[i].blendEnable) {
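                // The four constant blend factors (CONSTANT_COLOR through ONE_MINUS_CONSTANT_ALPHA) are contiguous
                // enum values, so a simple range check detects any factor that uses the blend constants.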
                if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
                    pPipe->blendConstantsEnabled = true;
                }
            }
        }
    }
    // Check if sample location is enabled
    if (pPipe->graphicsPipelineCI.pMultisampleState) {
        const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
            lvl_find_in_chain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
        if (sample_location_state != nullptr) {
            pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
3134 cb_state->status &= ~cb_state->static_status;
3135 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
3136 cb_state->status |= cb_state->static_status;
3137 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003138 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003139 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
3140 SetPipelineState(pipe_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003141 AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003142}
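
// Illustrative note (not from this file) on the status bookkeeping above: bits the
// previous pipeline supplied as static state are dropped first, then the new pipeline's
// static bits are set, leaving only dynamic-state bits to be re-supplied by vkCmdSet* calls:
//
//     // status &= ~static_status;   -> retract what the *old* pipeline provided statically
//     // static_status = MakeStaticStateMask(...);  -> recompute for the *new* pipeline
//     // status |= static_status;    -> everything static in the new pipeline counts as set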

void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                         uint32_t viewportCount, const VkViewport *pViewports) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_VIEWPORT_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
                                                                   uint32_t exclusiveScissorCount,
                                                                   const VkRect2D *pExclusiveScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
    // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
    cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
                                                                    VkImageLayout imageLayout) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    if (imageView != VK_NULL_HANDLE) {
        auto view_state = GetImageViewState(imageView);
        AddCommandBufferBindingImageView(cb_state, view_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                             uint32_t viewportCount,
                                                                             const VkShadingRatePaletteNV *pShadingRatePalettes) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
    // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
}

void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
                                                                         const VkAccelerationStructureCreateInfoNV *pCreateInfo,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkAccelerationStructureNV *pAccelerationStructure,
                                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
                                                         &as_state->build_scratch_memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
                                                         &as_state->update_scratch_memory_requirements);

    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}
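
// Illustrative sketch (not from this file): the application-side query this pre-fetch
// mirrors, so later vkBindAccelerationStructureMemoryNV() validation has sizes on hand
// even if the app never asks:
//
//     VkAccelerationStructureMemoryRequirementsInfoNV info = {
//         VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV};
//     info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
//     info.accelerationStructure = as;
//     VkMemoryRequirements2KHR reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
//     vkGetAccelerationStructureMemoryRequirementsNV(device, &info, &reqs);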

void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
                                                                          const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
                                                                          const VkAllocationCallbacks *pAllocator,
                                                                          VkAccelerationStructureKHR *pAccelerationStructure,
                                                                          VkResult result) {
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoKHR as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR;
    as_memory_requirements_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &as_memory_requirements_info, &as_state->memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoKHR scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR;
    scratch_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &scratch_memory_req_info,
                                                          &as_state->build_scratch_memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoKHR update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR;
    update_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &update_memory_req_info,
                                                          &as_state->update_scratch_memory_requirements);

    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}

void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
    VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
    ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
    if (as_state != nullptr) {
        if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
            as_state->memory_requirements = *pMemoryRequirements;
            as_state->memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
            as_state->build_scratch_memory_requirements = *pMemoryRequirements;
            as_state->build_scratch_memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
            as_state->update_scratch_memory_requirements = *pMemoryRequirements;
            as_state->update_scratch_memory_requirements_checked = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryCommon(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result,
    bool isNV) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        const VkBindAccelerationStructureMemoryInfoKHR &info = pBindInfos[i];

        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
        if (as_state) {
            // Track bound memory range information
            auto mem_info = GetDevMemState(info.memory);
            if (mem_info) {
                InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
            }
            // Track objects tied to memory
            SetMemBinding(info.memory, as_state, info.memoryOffset,
                          VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR));

            // GPU validation of top level acceleration structure building needs acceleration structure handles.
            // XXX TODO: Query device address for KHR extension
            if (enabled.gpu_validation && isNV) {
                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
    PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, true);
}

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryKHR(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result) {
    PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, false);
}

void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
    VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
    VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state == nullptr) {
        return;
    }

    ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
    ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
    if (dst_as_state != nullptr) {
        dst_as_state->built = true;
        dst_as_state->build_info.initialize(pInfo);
        AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
    }
    if (src_as_state != nullptr) {
        AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
    }
    cb_state->hasBuildAccelerationStructureCmd = true;
}
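
// Illustrative sketch (not from this file): an update build records both handles, e.g.
//
//     // vkCmdBuildAccelerationStructureNV(cmd, &info, instanceBuf, 0,
//     //                                   VK_TRUE /*update*/, tlas /*dst*/, tlas /*src*/,
//     //                                   scratchBuf, 0);
//     // -> dst_as_state->built becomes true, build_info snapshots 'info', and both the
//     //    src and dst structures are bound to this command buffer for invalidation.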

void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
                                                                          VkAccelerationStructureNV dst,
                                                                          VkAccelerationStructureNV src,
                                                                          VkCopyAccelerationStructureModeNV mode) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info = src_as_state->build_info;
            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
                                                                          VkAccelerationStructureKHR accelerationStructure,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!accelerationStructure) return;
    auto *as_state = GetAccelerationStructureState(accelerationStructure);
    if (as_state) {
        const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
        InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
        for (auto mem_binding : as_state->GetBoundMemory()) {
            RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_binding);
        }
        ClearMemoryObjectBindings(obj_struct);
        as_state->destroyed = true;
        accelerationStructureMap.erase(accelerationStructure);
    }
}

void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
                                                                         VkAccelerationStructureNV accelerationStructure,
                                                                         const VkAllocationCallbacks *pAllocator) {
    PreCallRecordDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator);
}

void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                   uint32_t viewportCount,
                                                                   const VkViewportWScalingNV *pViewportWScalings) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
                                                               uint16_t lineStipplePattern) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
                                                          float depthBiasClamp, float depthBiasSlopeFactor) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
                                                        const VkRect2D *pScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
    cb_state->status |= CBSTATUS_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
                                                            float maxDepthBounds) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                   uint32_t compareMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t writeMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t reference) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
}

// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
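
// Illustrative sketch (not from this file) of the disturb rules above: with
// layoutA = {X, Y} and layoutB = {X', Y}, where set layouts X and X' differ,
//
//     // vkCmdBindDescriptorSets(cmd, ..., layoutA, /*firstSet*/0, 2, setsAB, ...);
//     // vkCmdBindDescriptorSets(cmd, ..., layoutB, /*firstSet*/1, 1, &setY2, ...);
//     // -> per_set[1] is rebound; per_set[0] is "disturbed" (its compat id no longer
//     //    matches layoutB's id for set 0) and is reset to an invalid/unbound state.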

// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                                                                VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                uint32_t firstSet, uint32_t setCount,
                                                                const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                                const uint32_t *pDynamicOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pipeline_layout = GetPipelineLayout(layout);

    // Resize binding arrays
    uint32_t last_set_index = firstSet + setCount - 1;
    if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
        cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
                                  dynamicOffsetCount, pDynamicOffsets);
    cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
}

void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set, clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}
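
// Illustrative sketch (not from this file): the push-descriptor path this records, e.g.
//
//     VkWriteDescriptorSet write = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET};
//     write.dstBinding = 0;
//     write.descriptorCount = 1;
//     write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//     write.pBufferInfo = &buf_info;  // hypothetical VkDescriptorBufferInfo
//     vkCmdPushDescriptorSetKHR(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, layout, /*set*/0, 1, &write);
//     // No application VkDescriptorSet handle exists; the tracker materializes a
//     // layer-owned DescriptorSet and applies the writes to it instead.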

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
                                                                  VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                  uint32_t set, uint32_t descriptorWriteCount,
                                                                  const VkWriteDescriptorSet *pDescriptorWrites) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                                                            VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                                                            const void *pValues) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state != nullptr) {
        ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);

        auto &push_constant_data = cb_state->push_constant_data;
        assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
        std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
    }
}
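
// Illustrative sketch (not from this file): the shadow copy above mirrors byte-for-byte
// what the application pushes, e.g.
//
//     struct PC { float mvp[16]; } pc = {};
//     vkCmdPushConstants(cmd, layout, VK_SHADER_STAGE_VERTEX_BIT, /*offset*/0, sizeof(pc), &pc);
//     // -> push_constant_data[0 .. sizeof(pc)) now holds the same bytes for later validation.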

void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                             VkIndexType indexType) {
    auto buffer_state = GetBufferState(buffer);
    auto cb_state = GetCBState(commandBuffer);

    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->index_buffer_binding.buffer = buffer;
    cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
    cb_state->index_buffer_binding.offset = offset;
    cb_state->index_buffer_binding.index_type = indexType;
    // Add binding for this index buffer to this command buffer
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                               uint32_t bindingCount, const VkBuffer *pBuffers,
                                                               const VkDeviceSize *pOffsets) {
    auto cb_state = GetCBState(commandBuffer);

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer = pBuffers[i];
        vertex_buffer_binding.offset = pOffsets[i];
        // Add binding for this vertex buffer to this command buffer
        if (pBuffers[i]) {
            AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
        }
    }
}

void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
                                                           VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
}

bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
                                               EventToStageMap *localEventToStageMap) {
    (*localEventToStageMap)[event] = stageMask;
    return false;
}

void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}
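
// Illustrative note (not from this file): eventUpdates defers the state change to queue
// submission, since the stage mask only becomes visible when the command buffer actually
// executes. A minimal sketch of the deferred-closure pattern used above:
//
//     cb_state->eventUpdates.emplace_back(
//         [event](const ValidationStateTracker *, bool, EventToStageMap *map) {
//             (*map)[event] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;  // hypothetical value
//             return false;  // "false" == no validation error to report
//         });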

void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }

    cb_state->eventUpdates.emplace_back(
        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
        });
}

void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                        uint32_t bufferMemoryBarrierCount,
                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                        uint32_t imageMemoryBarrierCount,
                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    for (uint32_t i = 0; i < eventCount; ++i) {
        auto event_state = GetEventState(pEvents[i]);
        if (event_state) {
            AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
                                    cb_state);
        }
        cb_state->waitedEvents.insert(pEvents[i]);
        cb_state->events.push_back(pEvents[i]);
    }
}

bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
    (*localQueryToStateMap)[object] = value;
    return false;
}

bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
                                                QueryState value, QueryMap *localQueryToStateMap) {
    for (uint32_t i = 0; i < queryCount; i++) {
        QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
        (*localQueryToStateMap)[object] = value;
    }
    return false;
}

QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
                                                 uint32_t perfPass) const {
    QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);

    auto iter = localQueryToStateMap->find(query);
    if (iter != localQueryToStateMap->end()) return iter->second;

    return QUERYSTATE_UNKNOWN;
}
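
// Illustrative note (not from this file): a query's lifecycle in these maps is
// RESET -> RUNNING -> ENDED, keyed by (pool, index, perfPass). A hypothetical trace:
//
//     // vkCmdResetQueryPool(cmd, pool, 0, 1)  -> {pool,0,pass} = QUERYSTATE_RESET
//     // vkCmdBeginQuery(cmd, pool, 0, 0)      -> {pool,0,pass} = QUERYSTATE_RUNNING
//     // vkCmdEndQuery(cmd, pool, 0)           -> {pool,0,pass} = QUERYSTATE_ENDED
//     // GetQueryState(map, pool, 1, pass)     -> QUERYSTATE_UNKNOWN (never recorded)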

void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.insert(query_obj);
    cb_state->startedQueries.insert(query_obj);
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
        return false;
    });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
                                                         VkFlags flags) {
    if (disabled.query_validation) return;
    QueryObject query = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query);
}

void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.erase(query_obj);
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    QueryObject query_obj = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
        QueryObject query = {queryPool, slot};
        cb_state->resetQueries.insert(query);
    }

    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, VkQueryPool &firstPerfQueryPool,
                                                                            uint32_t perfQueryPass,
                                                                            QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
                                                                   VkDeviceSize dstOffset, VkDeviceSize stride,
                                                                   VkQueryResultFlags flags) {
    if (disabled.query_validation) return;
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buff_state = GetBufferState(dstBuffer);
    AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
                                                VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
}

void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            VkImageView view = pCreateInfo->pAttachments[i];
            auto view_state = GetImageViewState(view);
            if (!view_state) {
                continue;
            }
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}

void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
    }
}
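
// Illustrative sketch (not from this file): a two-subpass render pass with one
// dependency {srcSubpass = 0, dstSubpass = 1} produces
//
//     // subpass_to_node[0].next = {1};  subpass_to_node[1].prev = {0};
//
// while a dependency with srcSubpass == dstSubpass == 0 is recorded only in
// self_dependencies[0]; VK_SUBPASS_EXTERNAL edges are excluded from the DAG entirely.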

static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
    if (index == VK_ATTACHMENT_UNUSED) return;

    if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
}

void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
                                                         VkRenderPass *pRenderPass) {
    render_pass->renderPass = *pRenderPass;
    auto create_info = render_pass->createInfo.ptr();

    RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());

    for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);

            // resolve attachments are considered to be written
            if (subpass.pResolveAttachments) {
                MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
            }
        }
        if (subpass.pDepthStencilAttachment) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
        }
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
        }
    }

    // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
    renderPassMap[*pRenderPass] = std::move(render_pass);
}

// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in google style guide, but intentionally forces caller to move
// or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
// construction or assignment.
void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
}

void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                     const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                     VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                             VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer);
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(render_pass_state->cb_bindings,
                                VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
                                cb_state);

        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }

        cb_state->imagelessFramebufferAttachments.clear();
        auto attachment_info_struct = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
        if (attachment_info_struct) {
            for (uint32_t i = 0; i < attachment_info_struct->attachmentCount; i++) {
                IMAGE_VIEW_STATE *img_view_state = GetImageViewState(attachment_info_struct->pAttachments[i]);
                cb_state->imagelessFramebufferAttachments.push_back(img_view_state);
            }
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeSubpass++;
    cb_state->activeSubpassContents = contents;
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
    cb_state->imagelessFramebufferAttachments.clear();
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                             const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be
                // moved from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}
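
// Illustrative note (not from this file): after vkCmdExecuteCommands() the primary "owns"
// the secondaries' deferred work. A hypothetical sequence:
//
//     // secondary records vkCmdBeginQuery/vkCmdEndQuery -> lambdas land in sub_cb->queryUpdates
//     // vkCmdExecuteCommands(primary, 1, &secondary)    -> those lambdas are appended to
//     //                                                    the primary's queryUpdates above,
//     //                                                    so they run when the *primary* is submitted.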

void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}

void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range = MemRange();
        mem_info->p_driver_data = nullptr;
    }
}

void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
            AddAliasingImage(image_state);
        }
    }
}
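
// Illustrative sketch (not from this file): the swapchain path above corresponds to a
// bind like this, where no VkDeviceMemory is supplied at all:
//
//     VkBindImageMemorySwapchainInfoKHR sc_info = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR};
//     sc_info.swapchain = swapchain;
//     sc_info.imageIndex = 0;
//     VkBindImageMemoryInfo bind = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, &sc_info};
//     bind.image = image;
//     bind.memory = VK_NULL_HANDLE;  // memory comes from the swapchain, not the app
//     vkBindImageMemory2(device, 1, &bind);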
4098
void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
                                                           VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkBindImageMemoryInfo bindInfo = {};
    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindInfo.image = image;
    bindInfo.memory = mem;
    bindInfo.memoryOffset = memoryOffset;
    UpdateBindImageMemoryState(bindInfo);
}

void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
    auto event_state = GetEventState(event);
    if (event_state) {
        event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
    }
}

void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}

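// External semaphore handle types with reference transference permanently hand the payload to the external object, so the
// tracker stops reasoning about the semaphore once it is exported. Sync FD handles have copy transference (the payload is
// copied out at export time), so internal tracking stays valid for them.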
void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
        semaphore_state->scope = kSyncScopeExternalPermanent;
    }
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif

void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}

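// A fence import is temporary when the handle type has copy transference (Sync FD) or VK_FENCE_IMPORT_TEMPORARY_BIT is set;
// a temporary import only masks the original payload until the fence is reset, so the scope is downgraded to
// kSyncScopeExternalTemporary rather than kSyncScopeExternalPermanent.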
void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
                                                    VkFenceImportFlagsKHR flags) {
    FENCE_STATE *fence_node = GetFenceState(fence);
    if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
            fence_node->scope == kSyncScopeInternal) {
            fence_node->scope = kSyncScopeExternalTemporary;
        } else {
            fence_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
    FENCE_STATE *fence_state = GetFenceState(fence);
    if (fence_state) {
        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
            // Export with reference transference becomes external
            fence_state->scope = kSyncScopeExternalPermanent;
        } else if (fence_state->scope == kSyncScopeInternal) {
            // Export with copy transference has a side effect of resetting the fence
            fence_state->state = FENCE_UNSIGNALED;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}

void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
    if (VK_SUCCESS != result) return;
    eventMap[*pEvent].write_in_use = 0;
    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}

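// Swapchain creation links the new SWAPCHAIN_NODE to its surface and flags shared-presentable present modes, whose images
// follow different acquire/layout rules. Note that oldSwapchain is retired even when creation fails, as the spec requires.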
void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
    return;
}

void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}

void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}

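// Presentation consumes the wait semaphores (they become unsignaled with no outstanding signaler) and returns each
// successfully presented image to the WSI; images from shared presentable swapchains additionally get their layout locked.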
void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed, but if the app does that, it's
        // confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
    // its semaphore waits) /never/ participate in any completion proof.
}

void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
    if (pCreateInfos) {
        for (uint32_t i = 0; i < swapchainCount; i++) {
            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
        }
    }
}

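// vkAcquireNextImage* signals its fence/semaphore from the WSI rather than from a queue submission, so both are recorded
// with a VK_NULL_HANDLE signaler and can never participate in a queue completion proof.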
void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}

void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
    if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
            phys_device_state.phys_device = pPhysicalDevices[i];
            // Init actual features for each physical device
            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
        }
    }
}

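// The two-call idiom (count query, then data query) is tracked with a per-query state machine: UNCALLED -> QUERY_COUNT once
// the count has been requested -> QUERY_DETAILS once the data has been retrieved, letting later validation flag apps that
// consume data they never actually queried.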
// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!surface) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->destroyed = true;
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

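// Every platform-specific surface constructor below records the same minimal SURFACE_STATE; only the entry point and its
// platform guard differ, so each simply delegates to RecordVulkanSurface.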
#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_METAL_EXT
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_METAL_EXT

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
                                                                    const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
                                                                    const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                    VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

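// The feature queries below cache the features reported for the physical device in features2 (the 1.0 entry point wraps its
// result in a VkPhysicalDeviceFeatures2) and mark the query as QUERY_DETAILS.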
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    physical_device_state->features2.pNext = nullptr;
    physical_device_state->features2.features = *pFeatures;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
    if (*pSurfaceFormatCount) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
        }
        if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
            physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

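// Debug-utils labels are mirrored into report_data so message callbacks can report the current label region; insert
// additionally snapshots the label on the command buffer state for later error messages.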
void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

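// VK_KHR_performance_query: the counters reported for each queue family are cached in the physical device state's
// perf_counters map so the tracker can refer back to them later.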
void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
                                                                                              uint32_t queueFamilyIndex,
                                                                                              uint32_t *pCounterCount,
                                                                                              VkPerformanceCounterKHR *pCounters) {
    if (NULL == pCounters) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);

    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
    queueFamilyCounters->counters.resize(*pCounterCount);
    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];

    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}

void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}

void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

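// Push descriptors written through a template have no backing descriptor set, so the template payload is decoded into
// ordinary VkWriteDescriptorSet entries against the set layout taken from the pipeline layout, then recorded as a push
// descriptor update on the command buffer.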
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
    }

    const VkFormat conversion_format = create_info->format;

    if (conversion_format != VK_FORMAT_UNDEFINED) {
        // If format is VK_FORMAT_UNDEFINED, will be set by external AHB features
        ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
    }

    ycbcr_state->chromaFilter = create_info->chromaFilter;
    ycbcr_state->format = conversion_format;
    samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
    }

    auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
    ycbcr_state->destroyed = true;
    samplerYcbcrConversionMap.erase(ycbcr_conversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}

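// Host-side query reset (hostQueryReset / VK_EXT_host_query_reset). Performance queries keep one state entry per counter
// pass, so every pass index is reset alongside the plain query slot.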
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_obj.perf_pass = passIndex;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
    }
}

// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
                                                                       variable_count, this);
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}

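// The CmdDraw*/CmdDispatch* recorders below funnel through two helpers: dispatch-type updates refresh bound state via
// UpdateDrawState, and draw-type updates additionally mark the command buffer as containing a draw. Indirect variants also
// register the indirect (and count) buffers as command buffer bindings so buffer lifetime is validated against execution.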
5044// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
5045void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
5046 UpdateDrawState(cb_state, bind_point);
5047 cb_state->hasDispatchCmd = true;
5048}
5049
locke-lunargd556cc32019-09-17 01:21:23 -06005050// Generic function to handle state update for all CmdDraw* type functions
5051void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
5052 UpdateStateCmdDrawDispatchType(cb_state, bind_point);
locke-lunargd556cc32019-09-17 01:21:23 -06005053 cb_state->hasDrawCmd = true;
5054}
5055
5056void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5057 uint32_t firstVertex, uint32_t firstInstance) {
5058 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5059 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
5060}
5061
5062void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5063 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5064 uint32_t firstInstance) {
5065 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5066 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
5067}
5068
5069void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5070 uint32_t count, uint32_t stride) {
5071 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5072 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5073 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
5074 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5075}
5076
5077void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5078 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5079 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5080 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5081 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
5082 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5083}
5084
5085void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5086 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5087 UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
5088}
5089
5090void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5091 VkDeviceSize offset) {
5092 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5093 UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
5094 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5095 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5096}
5097
Tony-LunarG977448c2019-12-02 14:52:02 -07005098void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5099 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5100 uint32_t stride) {
5101 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5102 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5103 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
5104 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
5105 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5106 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5107}
5108
locke-lunargd556cc32019-09-17 01:21:23 -06005109void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5110 VkDeviceSize offset, VkBuffer countBuffer,
5111 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5112 uint32_t stride) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005113 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
5114}
5115
5116void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5117 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5118 uint32_t maxDrawCount, uint32_t stride) {
5119 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
5120}
5121
5122void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5123 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5124 uint32_t maxDrawCount, uint32_t stride) {
locke-lunargd556cc32019-09-17 01:21:23 -06005125 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5126 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5127 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
5128 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
5129 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5130 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5131}
5132
5133void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5134 VkDeviceSize offset, VkBuffer countBuffer,
5135 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5136 uint32_t stride) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005137 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
5138}
5139
5140void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
5141 VkDeviceSize offset, VkBuffer countBuffer,
5142 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5143 uint32_t stride) {
5144 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
locke-lunargd556cc32019-09-17 01:21:23 -06005145}
5146
5147void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5148 uint32_t firstTask) {
5149 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5150 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
5151}
5152
5153void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5154 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5155 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5156 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
5157 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5158 if (buffer_state) {
5159 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5160 }
5161}
5162
5163void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5164 VkDeviceSize offset, VkBuffer countBuffer,
5165 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5166 uint32_t stride) {
5167 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5168 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5169 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
5170 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
5171 if (buffer_state) {
5172 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5173 }
5174 if (count_buffer_state) {
5175 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5176 }
5177}
5178
void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}

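// Walk a pipeline stage's SPIR-V to record which descriptor slots the stage can actually reach;
// draw-time validation then only has to check the slots captured in active_slots instead of every
// binding declared in the pipeline layout.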
void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the module or its spirv isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module || !module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline
    for (auto use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
        auto &reqs = pipeline->active_slots[slot][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
    }
}

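// The command buffer keeps a shadow copy of push constant data. When a layout with different push
// constant ranges is bound, the shadow is resized to cover the union of the new ranges and zero-filled.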
void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}

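// Swapchain images are created by the implementation rather than through vkCreateImage, so state objects
// for them are synthesized here. Applications typically use the two-call idiom, e.g.:
//
//     uint32_t count = 0;
//     vkGetSwapchainImagesKHR(device, swapchain, &count, nullptr);         // tracked below as QUERY_COUNT
//     std::vector<VkImage> images(count);
//     vkGetSwapchainImagesKHR(device, swapchain, &count, images.data());   // tracked below as QUERY_DETAILS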
void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

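            // Reconstruct the effective VkImageCreateInfo from the swapchain's create info, so the rest
            // of the image validation machinery can treat swapchain images like ordinary 2D images.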
            // Add imageMap entries for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;  // to be set later
            image_ci.flags = 0;        // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

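            // Swapchain images are backed by the swapchain rather than by VkDeviceMemory; record that
            // binding so memory-binding checks treat the swapchain as their backing store.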
            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);

            AddImageStateProps(*image_state, device, physical_device);
        }
    }

    if (*pSwapchainImageCount) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
        }
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}