/* Copyright (c) 2015-2020 The Khronos Group Inc.
 * Copyright (c) 2015-2020 Valve Corporation
 * Copyright (c) 2015-2020 LunarG, Inc.
 * Copyright (C) 2015-2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Shannon McPherson <shannon@lunarg.com>
 */

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}
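
// instance_state is a raw back-pointer to the instance-level state tracker; device-level calls use it
// to reach instance-scope state (e.g. physical_device_map in GetPhysicalDeviceState below) without
// taking ownership of the instance object.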

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->external_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
        // VUID 01894 will catch the case where the external format was never recorded in the map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            is_node->format_features = it->second;
        }
    }
}
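
// For reference, the app-side create flow that feeds this path looks roughly like this (illustrative
// sketch, not layer code):
//     VkExternalFormatANDROID ext_fmt = {VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, nullptr, external_format};
//     VkExternalMemoryImageCreateInfo emici = {VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, &ext_fmt,
//                                              VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID};
//     image_ci.pNext = &emici;  // vkCreateImage() then reaches here via PostCallRecordCreateImage()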

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
        // VUID 01894 will catch the case where the external format was never recorded in the map
        auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            ycbcr_state->format_features = it->second;
        }
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
    if (ahb_format_props) {
        ahb_ext_formats_map.insert({ahb_format_props->externalFormat, ahb_format_props->formatFeatures});
    }
}
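
// The (externalFormat -> VkFormatFeatureFlags) entries recorded above are what
// RecordCreateImageANDROID() and RecordCreateSamplerYcbcrConversionANDROID() look up, so feature
// tracking for external-format resources only works after the app has queried the AHB's properties.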

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
                                                                                    uint32_t set) {
    std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
    if (layout_data && (set < layout_data->set_layouts.size())) {
        dsl = layout_data->set_layouts[set];
    }
    return dsl;
}

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(*pImage, pCreateInfo);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) == 0) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least one plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }
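    // With VK_IMAGE_CREATE_DISJOINT_BIT each plane is bound to memory separately, which is why the
    // per-plane requirements are captured above; the matching app-side bind chains a
    // VkBindImagePlaneMemoryInfo into each VkBindImageMemoryInfo passed to vkBindImageMemory2()
    // (one bind per plane).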

    // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
    // if format is AHB external format then the features are already set
    if (is_node->has_ahb_format == false) {
        const VkImageTiling image_tiling = pCreateInfo->tiling;
        const VkFormat image_format = pCreateInfo->format;
        if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
            // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
            assert(device_extensions.vk_ext_image_drm_format_modifier);
            VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
                VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
            DispatchGetImageDrmFormatModifierPropertiesEXT(device, *pImage, &drm_format_properties);

            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            // Two-call idiom: the first call fills in the modifier count, the second fills the array;
            // without allocating storage the loop below would read through a null pointer
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);

            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                // DRM format modifiers are opaque 64-bit IDs, not bitmasks, so match on equality
                if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
                    drm_format_properties.drmFormatModifier) {
                    is_node->format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
                }
            }
        } else {
            VkFormatProperties format_properties;
            DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
            is_node->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                : format_properties.optimalTilingFeatures;
        }
    }

    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image, mem_binding);
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}
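
// Setting destroyed = true (rather than deleting the state object outright) lets holders of the
// shared_ptr, e.g. command buffers still referencing this image, detect a dangling use after the
// handle itself has been removed from imageMap.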

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    // Get the memory requirements up front, in case the app never queries them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);

    // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
    const VkImageTiling image_tiling = image_state->createInfo.tiling;
    const VkFormat image_view_format = pCreateInfo->format;
    if (image_state->has_ahb_format == true) {
        // The ImageView inherits the Image's format features because they share the same AHB
        image_view_state->format_features = image_state->format_features;
    } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
        // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
        assert(device_extensions.vk_ext_image_drm_format_modifier);
        VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
                                                                       nullptr};
        DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);

        VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
        VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                    nullptr};
        format_properties_2.pNext = (void *)&drm_properties_list;
        // Two-call idiom: fetch the modifier count, then allocate storage and fetch the properties;
        // the loop below must not dereference a null pDrmFormatModifierProperties pointer
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
        std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties(drm_properties_list.drmFormatModifierCount);
        drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
        DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);

        for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
            // DRM format modifiers are opaque 64-bit IDs, not bitmasks, so match on equality
            if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
                drm_format_properties.drmFormatModifier) {
                image_view_state->format_features |=
                    drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
            }
        }
    } else {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
        image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
                                                                                     : format_properties.optimalTilingFeatures;
    }

    imageViewMap.insert(std::make_pair(*pView, std::move(image_view_state)));
}
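
// Note the feature query above uses the view's format rather than the image's: with
// VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT the two can differ, and downstream checks care about what the
// view itself supports.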

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        RemoveBufferMemoryRange(buffer, mem_binding);
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image viewstate for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image viewstate for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}
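
// Imageless framebuffers (VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) carry no VkImageView handles at
// creation time - the views arrive via VkRenderPassAttachmentBeginInfo when the render pass begins -
// so the accessors above deliberately return nullptr for them.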

void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        if (image_state->binding.mem_state) {
            bound_images = &image_state->binding.mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}
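
// The aliasing links are kept symmetric: an image appears in another image's aliasing_images set
// exactly when the reverse link exists, which is why the emplace above only mirrors the handle on a
// fresh insert.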

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // This is a one-way clear. Because bound_images already contains the cross references, clearing
    // each image's aliasing_images set in this single pass severs every link; no second pass is needed.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}

const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}
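
// The State/Result template parameters let the const and non-const GetObjectMemBinding overloads
// below share one implementation: State is instantiated as (const) ValidationStateTracker * and
// Result as the matching (const) BINDABLE *, preserving const-correctness without duplicating the
// switch.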

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}
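
// App-side sketch of the dedicated-allocation path recorded above (illustrative, not layer code):
//     VkMemoryDedicatedAllocateInfo dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, nullptr,
//                                                     image /* or VK_NULL_HANDLE */, buffer /* or VK_NULL_HANDLE */};
//     alloc_info.pNext = &dedicated_info;  // vkAllocateMemory() then marks this DEVICE_MEMORY_STATE is_dedicated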

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(mem_binding->cb_bindings,
                                        VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
            }
        }
    }
}
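
// These AddCommandBufferBinding* helpers all follow the same two-level pattern: link the object itself
// to the command buffer, and only on a first-time link also walk its bound memory, so that destroying
// either the object or its backing VkDeviceMemory can invalidate the recorded command buffer.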

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            mem_binding->binding.size = mem_binding->requirements.size;
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}
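
// This is the state-tracking mirror of vkBindImageMemory/vkBindBufferMemory; e.g. a successful
// vkBindImageMemory(device, image, mem, offset) records {mem_state, offset, requirements.size} on the
// image's BINDABLE so later checks can resolve the image back to its VkDeviceMemory and byte range.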

// For the NULL mem case, clear any previous binding. Else...
// Make sure given object is in its object map
// IF a previous binding existed, update binding
// Add reference from objectInfo to memoryInfo
// Add reference off of object's binding info
// Return VK_TRUE if addition is successful, VK_FALSE otherwise
bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
                                                 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
    bool skip = VK_FALSE;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
            if (binding.mem_state) {
                binding.mem_state->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}

void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            if (!descriptor_set->IsPushDescriptor()) {
                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
                // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

                if (reduced_map.IsManyDescriptors()) {
                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
                }

                // We can skip updating the state if "nothing" has changed since the last validation.
                // See CoreChecks::ValidateCmdBufDrawState for more details.
                bool descriptor_set_changed =
                    !reduced_map.IsManyDescriptors() ||
                    // Update if descriptor set (or contents) has changed
                    state.per_set[setIndex].validated_set != descriptor_set ||
                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                    (!disabled.image_layout_validation &&
                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
                bool need_update = descriptor_set_changed ||
                                   // Update if previous bindingReqMap doesn't include new bindingReqMap
                                   !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                                  state.per_set[setIndex].validated_set_binding_req_map.end(),
                                                  binding_req_map.begin(), binding_req_map.end());

                if (need_update) {
                    // Bind this set and its active descriptor resources to the command buffer
                    if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                        // Only record the bindings that haven't already been recorded
                        BindingReqMap delta_reqs;
                        std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                            state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                            state.per_set[setIndex].validated_set_binding_req_map.end(),
                                            std::inserter(delta_reqs, delta_reqs.begin()));
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, delta_reqs);
                    } else {
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, binding_req_map);
                    }

                    state.per_set[setIndex].validated_set = descriptor_set;
                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                    if (reduced_map.IsManyDescriptors()) {
                        // Check whether old == new before assigning, the equality check is much cheaper than
                        // freeing and reallocating the map.
                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                        }
                    } else {
                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                    }
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}
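
// The caching above turns per-draw descriptor processing into an incremental update: std::includes()
// detects when the new binding requirements are already a subset of what was validated, and
// std::set_difference() extracts only the unseen bindings (both rely on BindingReqMap being an ordered
// container), so "bindless" sets with thousands of descriptors are not re-walked on every draw.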

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
    VkFormatFeatureFlags format_features = 0;

    if (format != VK_FORMAT_UNDEFINED) {
        VkFormatProperties format_properties;
        DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
        format_features |= format_properties.linearTilingFeatures;
        format_features |= format_properties.optimalTilingFeatures;
        if (device_extensions.vk_ext_image_drm_format_modifier) {
            // VK_KHR_get_physical_device_properties2 is required in this case
            VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
            VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
                                                                        nullptr};
            format_properties_2.pNext = (void *)&drm_properties_list;
            // Two-call idiom: fetch the modifier count, then allocate storage and fetch the properties
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
            std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties(drm_properties_list.drmFormatModifierCount);
            drm_properties_list.pDrmFormatModifierProperties = drm_properties.data();
            DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
            for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
                format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
            }
        }
    }

    return format_features;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
// Add object_binding to cmd buffer
// Add cb_binding to object
bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return false;
    }
    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
    auto inserted = cb_bindings.insert({cb_node, -1});
    if (inserted.second) {
        cb_node->object_bindings.push_back(obj);
        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
        return true;
    }
    return false;
}
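
// Storing the object_bindings index in the map makes the reverse link cheap to follow: when an object
// is destroyed, its entry in each bound command buffer's object_bindings vector can be located (and
// zeroed, see the kVulkanObjectTypeUnknown case above) without a linear search. The return value tells
// callers whether this was a first-time binding.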

// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        pCB->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        pCB->small_indexed_draw_call_count = 0;
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);
        }
    }
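
    // The spec forbids supplying both pEnabledFeatures and a chained VkPhysicalDeviceFeatures2
    // (VUID-VkDeviceCreateInfo-pNext-00373), so whichever one is present is the single source of the
    // core feature set captured below.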

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously set them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
    if (vulkan_12_features) {
        state_tracker->enabled_features.core12 = *vulkan_12_features;
    } else {
        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan12Features struct

        const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
        if (eight_bit_storage_features) {
            state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
            state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
                eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
            state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
        }

        const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
        if (float16_int8_features) {
            state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
            state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
        }

        const auto *descriptor_indexing_features =
            lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
        if (descriptor_indexing_features) {
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
            state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
                descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
                descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
            state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1167 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1168 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1169 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1170 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1171 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1172 descriptor_indexing_features->descriptorBindingPartiallyBound;
1173 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1174 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1175 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001176 }
1177
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001178 const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001179 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001180 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001181 }
1182
1183 const auto *imageless_framebuffer_features =
1184 lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
1185 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001186 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001187 }
1188
1189 const auto *uniform_buffer_standard_layout_features =
1190 lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
1191 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001192 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1193 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001194 }
1195
1196 const auto *subgroup_extended_types_features =
1197 lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
1198 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001199 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1200 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001201 }
1202
1203 const auto *separate_depth_stencil_layouts_features =
1204 lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
1205 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001206 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1207 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001208 }
1209
1210 const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
1211 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001212 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001213 }
1214
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001215 const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001216 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001217 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001218 }
1219
1220 const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
1221 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001222 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1223 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1224 buffer_device_address->bufferDeviceAddressCaptureReplay;
1225 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1226 buffer_device_address->bufferDeviceAddressMultiDevice;
1227 }
1228 }
1229
1230 const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
1231 if (vulkan_11_features) {
1232 state_tracker->enabled_features.core11 = *vulkan_11_features;
1233 } else {
1234 // These structs are only allowed in pNext chain if there is no kPhysicalDeviceVulkan11Features
1235
1236 const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
1237 if (sixteen_bit_storage_features) {
1238 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1239 sixteen_bit_storage_features->storageBuffer16BitAccess;
1240 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1241 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1242 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1243 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1244 }
1245
1246 const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
1247 if (multiview_features) {
1248 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1249 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1250 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1251 }
1252
1253 const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
1254 if (variable_pointers_features) {
1255 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1256 variable_pointers_features->variablePointersStorageBuffer;
1257 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1258 }
1259
1260 const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
1261 if (protected_memory_features) {
1262 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1263 }
1264
1265 const auto *ycbcr_conversion_features =
1266 lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
1267 if (ycbcr_conversion_features) {
1268 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1269 }
1270
1271 const auto *shader_draw_parameters_features =
1272 lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
1273 if (shader_draw_parameters_features) {
1274 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001275 }
1276 }
1277
locke-lunargd556cc32019-09-17 01:21:23 -06001278 const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
1279 state_tracker->physical_device_count =
1280 device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;
1281
locke-lunargd556cc32019-09-17 01:21:23 -06001282 const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
1283 if (exclusive_scissor_features) {
1284 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1285 }
1286
1287 const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
1288 if (shading_rate_image_features) {
1289 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1290 }
1291
1292 const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
1293 if (mesh_shader_features) {
1294 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1295 }
1296
1297 const auto *inline_uniform_block_features =
1298 lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
1299 if (inline_uniform_block_features) {
1300 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1301 }
1302
1303 const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
1304 if (transform_feedback_features) {
1305 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1306 }
1307
locke-lunargd556cc32019-09-17 01:21:23 -06001308 const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
1309 if (vtx_attrib_div_features) {
1310 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1311 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001312
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001313 const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
1314 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001315 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001316 }
1317
1318 const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
1319 if (cooperative_matrix_features) {
1320 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1321 }
1322
locke-lunargd556cc32019-09-17 01:21:23 -06001323 const auto *compute_shader_derivatives_features =
1324 lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
1325 if (compute_shader_derivatives_features) {
1326 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1327 }
1328
1329 const auto *fragment_shader_barycentric_features =
1330 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
1331 if (fragment_shader_barycentric_features) {
1332 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1333 }
1334
1335 const auto *shader_image_footprint_features =
1336 lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
1337 if (shader_image_footprint_features) {
1338 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1339 }
1340
1341 const auto *fragment_shader_interlock_features =
1342 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
1343 if (fragment_shader_interlock_features) {
1344 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1345 }
1346
1347 const auto *demote_to_helper_invocation_features =
1348 lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
1349 if (demote_to_helper_invocation_features) {
1350 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1351 }
1352
1353 const auto *texel_buffer_alignment_features =
1354 lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
1355 if (texel_buffer_alignment_features) {
1356 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1357 }
1358
locke-lunargd556cc32019-09-17 01:21:23 -06001359 const auto *pipeline_exe_props_features =
1360 lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
1361 if (pipeline_exe_props_features) {
1362 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1363 }
1364
Jeff Bolz82f854d2019-09-17 14:56:47 -05001365 const auto *dedicated_allocation_image_aliasing_features =
1366 lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
1367 if (dedicated_allocation_image_aliasing_features) {
1368 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1369 *dedicated_allocation_image_aliasing_features;
1370 }
1371
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001372 const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
1373 if (performance_query_features) {
1374 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1375 }
1376
Tobias Hector782bcde2019-11-28 16:19:42 +00001377 const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
1378 if (device_coherent_memory_features) {
1379 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1380 }
1381
sfricke-samsungcead0802020-01-30 22:20:10 -08001382 const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
1383 if (ycbcr_image_array_features) {
1384 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1385 }
1386
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001387 const auto *ray_tracing_features = lvl_find_in_chain<VkPhysicalDeviceRayTracingFeaturesKHR>(pCreateInfo->pNext);
1388 if (ray_tracing_features) {
1389 state_tracker->enabled_features.ray_tracing_features = *ray_tracing_features;
1390 }
1391
locke-lunargd556cc32019-09-17 01:21:23 -06001392 // Store physical device properties and physical device mem limits into CoreChecks structs
1393 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1394 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001395 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1396 &state_tracker->phys_dev_props_core11);
1397 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1398 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001399
1400 const auto &dev_ext = state_tracker->device_extensions;
1401 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1402
1403 if (dev_ext.vk_khr_push_descriptor) {
1404 // Get the needed push_descriptor limits
1405 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1406 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1407 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1408 }
1409
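    // For properties promoted to core in Vulkan 1.2, only fall back to the individual extension structs when the
    // device was not created against the 1.2 feature version; otherwise phys_dev_props_core12 was already filled
    // in by the GetPhysicalDeviceExtProperties calls above.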
    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
        VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
        state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
            descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
        state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
            descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
        state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
        state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
            descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
        VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
        state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
        state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
            depth_stencil_resolve_props.supportedStencilResolveModes;
        state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
        state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing, &phys_dev_props->ray_tracing_propsKHR);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
        VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
        state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
            timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
    }

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
        VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
        state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
        state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
    }

    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
                                                                               state_tracker->cooperative_matrix_properties.data());
    }
    if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
        state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
        state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
        state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
    }

    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            state_tracker->queue_family_index_map.insert(
                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}

// Loop through bound objects and increment their in_use counts.
void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
    for (auto obj : cb_node->object_bindings) {
        auto base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_add(1);
        }
    }
}

// Track which resources are in-flight by atomically incrementing their "in_use" count
void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
    cb_node->submitCount++;
    cb_node->in_use.fetch_add(1);

    // First increment for all "generic" objects bound to the cmd buffer, followed by special-case objects below
    IncrementBoundObjects(cb_node);
    // TODO: We should be able to remove the NULL look-up checks from the code below as long as
    // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
    // should then be flagged prior to calling this function
    for (auto event : cb_node->writeEventsBeforeWait) {
        auto event_state = GetEventState(event);
        if (event_state) event_state->write_in_use++;
    }
}

// Decrement in-use count for objects bound to command buffer
void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
    BASE_NODE *base_obj = nullptr;
    for (auto obj : cb_node->object_bindings) {
        base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_sub(1);
        }
    }
}

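// Roll pQueue forward to sequence number 'seq': for each completed submission, release the in_use references taken
// at submit time, advance timeline semaphore payloads, promote ended queries to available, and retire the fence.
// Cross-queue semaphore waits are collected in otherQueueSeqs and retired recursively at the end.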
void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        for (auto &signal : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(signal.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
                if (pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && pSemaphore->payload < signal.payload) {
                    pSemaphore->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            QueryMap localQueryToStateMap;
            VkQueryPool first_pool = VK_NULL_HANDLE;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &localQueryToStateMap);
            }

            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
}

// Submit a fence to a queue, delimiting previous fences and previous untracked
// work by it.
static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
    pFence->state = FENCE_INFLIGHT;
    pFence->signaler.first = pQueue->queue;
    pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
}

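// Record the state effects of a successful vkQueueSubmit. Each VkSubmitInfo becomes one tracked submission on the
// queue (command buffers, semaphore waits/signals, and the fence on the last submit), with binary and timeline
// semaphores (VK_KHR_timeline_semaphore) taking separate paths below. Signals whose completing wait we can never
// observe (external scope) instead force an early retire of everything already queued.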
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early; we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        vector<SEMAPHORE_WAIT> semaphore_waits;
        vector<SEMAPHORE_SIGNAL> semaphore_signals;
        vector<VkSemaphore> semaphore_externals;
        const uint64_t next_seq = pQueue->seq + pQueue->submissions.size() + 1;
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_WAIT wait;
                    wait.semaphore = semaphore;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                            wait.queue = pSemaphore->signaler.first;
                            wait.seq = pSemaphore->signaler.second;
                            semaphore_waits.push_back(wait);
                            pSemaphore->in_use.fetch_add(1);
                        }
                        pSemaphore->signaler.first = VK_NULL_HANDLE;
                        pSemaphore->signaled = false;
                    } else if (pSemaphore->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
                        wait.queue = queue;
                        wait.seq = next_seq;
                        wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
                        semaphore_waits.push_back(wait);
                        pSemaphore->in_use.fetch_add(1);
                    }
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = next_seq;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = next_seq;
                        pSemaphore->signaled = true;
                    } else {
                        signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early; we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, next_seq);
                }
            }
        }
        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                VkQueryPool first_pool = VK_NULL_HANDLE;
                EventToStageMap localEventToStageMap;
                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, first_pool, perf_pass, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, perf_pass);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS == result) {
        AddMemObjInfo(device, *pMemory, pAllocateInfo);
    }
    return;
}

void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    memObjMap.erase(mem);
}

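// vkQueueBindSparse is tracked like a submit: sparse memory bindings are recorded on the affected buffers/images,
// and each VkBindSparseInfo is pushed as a submission so its semaphores and the optional fence retire through the
// same RetireWorkOnQueue path. Per the TODO below, the size recorded for non-opaque image binds is only a rough
// placeholder, not the true sparse block size.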
void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early; we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);

                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = pSemaphore->signaler.second;
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early; we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

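// Semaphores start out tracked as binary with no pending signaler; a VkSemaphoreTypeCreateInfoKHR in the pNext
// chain switches the tracked type to timeline and seeds the payload with the requested initial value.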
void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
    semaphore_state->signaler.first = VK_NULL_HANDLE;
    semaphore_state->signaler.second = 0;
    semaphore_state->signaled = false;
    semaphore_state->scope = kSyncScopeInternal;
    semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
    semaphore_state->payload = 0;
    auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
    if (semaphore_type_create_info) {
        semaphore_state->type = semaphore_type_create_info->semaphoreType;
        semaphore_state->payload = semaphore_type_create_info->initialValue;
    }
    semaphoreMap[*pSemaphore] = std::move(semaphore_state);
}

void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
                                                        VkSemaphoreImportFlagsKHR flags) {
    SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
    if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
            sema_node->scope == kSyncScopeInternal) {
            sema_node->scope = kSyncScopeExternalTemporary;
        } else {
            sema_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
                                                              VkResult result) {
    auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
    pSemaphore->payload = pSignalInfo->value;
}

void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range.offset = offset;
        mem_info->mapped_range.size = size;
        mem_info->p_driver_data = *ppData;
    }
}

void ValidationStateTracker::RetireFence(VkFence fence) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        if (pFence->signaler.first != VK_NULL_HANDLE) {
            // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
            RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
        } else {
            // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
            // the fence as retired.
            pFence->state = FENCE_RETIRED;
        }
    }
}

void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                         VkBool32 waitAll, uint64_t timeout, VkResult result) {
    if (VK_SUCCESS != result) return;

    // When we know that all fences are complete we can clean/remove their CBs
    if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
        for (uint32_t i = 0; i < fenceCount; i++) {
            RetireFence(pFences[i]);
        }
    }
    // NOTE: The alternate case, where only some of the fences have completed, is not handled here. For the app to
    // know which fences completed it will have to call vkGetFenceStatus(), at which point we'll clean/remove their
    // CBs if complete.
}

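// A timeline semaphore that is known to have reached 'until_payload' proves completion of every queued submission
// that signals it with a payload <= until_payload, so each queue can be retired up to the highest matching sequence
// number. Illustrative sketch of the triggering call pattern (handles here are hypothetical, not from this file):
//
//     uint64_t value = 5;  // payload an earlier vkQueueSubmit will signal
//     VkSemaphoreWaitInfo wait_info = {VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO};
//     wait_info.semaphoreCount = 1;
//     wait_info.pSemaphores = &timeline_sem;
//     wait_info.pValues = &value;
//     vkWaitSemaphores(device, &wait_info, UINT64_MAX);  // success -> RetireTimelineSemaphore(timeline_sem, 5)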
void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore) {
        for (auto &pair : queueMap) {
            QUEUE_STATE &queueState = pair.second;
            uint64_t max_seq = 0;
            for (const auto &submission : queueState.submissions) {
                for (const auto &signalSemaphore : submission.signalSemaphores) {
                    if (signalSemaphore.semaphore == semaphore && signalSemaphore.payload <= until_payload) {
                        if (signalSemaphore.seq > max_seq) {
                            max_seq = signalSemaphore.seq;
                        }
                    }
                }
            }
            if (max_seq) {
                RetireWorkOnQueue(&queueState, max_seq);
            }
        }
    }
}

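// Shared recording logic for vkWaitSemaphores and vkWaitSemaphoresKHR: on success, every waited-on timeline
// semaphore has reached its requested value, so the corresponding queued work can be retired.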
John Zulauff89de662020-04-13 18:57:34 -06002079void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2080 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002081 if (VK_SUCCESS != result) return;
2082
2083 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2084 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2085 }
2086}
2087
John Zulauff89de662020-04-13 18:57:34 -06002088void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2089 VkResult result) {
2090 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2091}
2092
2093void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
2094 uint64_t timeout, VkResult result) {
2095 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2096}
2097
locke-lunargd556cc32019-09-17 01:21:23 -06002098void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2099 if (VK_SUCCESS != result) return;
2100 RetireFence(fence);
2101}
2102
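// vkGetDeviceQueue may be called any number of times for the same queue; only the first retrieval creates the
// QUEUE_STATE entry (with an empty submission history starting at seq 0).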
void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
    // Add queue to tracking set only if it is new
    auto queue_is_new = queues.emplace(queue);
    if (queue_is_new.second) {
        QUEUE_STATE *queue_state = &queueMap[queue];
        queue_state->queue = queue;
        queue_state->queueFamilyIndex = queue_family_index;
        queue_state->seq = 0;
    }
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
    if (VK_SUCCESS != result) return;
    QUEUE_STATE *queue_state = GetQueueState(queue);
    RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
}

void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (auto &queue : queueMap) {
        RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
    }
}

void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    if (!fence) return;
    auto fence_state = GetFenceState(fence);
    fence_state->destroyed = true;
    fenceMap.erase(fence);
}

void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!semaphore) return;
    auto semaphore_state = GetSemaphoreState(semaphore);
    semaphore_state->destroyed = true;
    semaphoreMap.erase(semaphore);
}

void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    if (!event) return;
    EVENT_STATE *event_state = GetEventState(event);
    const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
    InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
    eventMap.erase(event);
}

void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!queryPool) return;
    QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
    const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
    InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
    qp_state->destroyed = true;
    queryPoolMap.erase(queryPool);
}

// Object with given handle is being bound to memory w/ given mem_info struct.
// Track the binding by inserting the handle into the memory object's set of bound objects.
// (Overlap and aliasing checks between previously bound ranges are performed by the validation passes, not here.)
void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
                                               VkDeviceSize memoryOffset) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
}

void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
}

void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
                                                                    VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
}

// This function removes the handle from the memory object's set of bound objects.
static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
}

void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
}

void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
}

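// Binding a buffer to memory has two state side effects: the memory object records the buffer in its bound-object
// set (InsertBufferMemoryRange), and SetMemBinding links the buffer state to the memory object so validation can
// walk the binding in either direction.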
void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        // Track bound memory range information
        auto mem_info = GetDevMemState(mem);
        if (mem_info) {
            InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
        }
        // Track objects tied to memory
        SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
                                                             const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                                const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        buffer_state->memory_requirements_checked = true;
    }
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                        VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                           VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}

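// For a disjoint multi-planar image the application queries each plane separately by chaining
// VkImagePlaneMemoryRequirementsInfo, roughly like this app-side sketch (variable names are illustrative):
//
//     VkImagePlaneMemoryRequirementsInfo plane_info = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
//     plane_info.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
//     VkImageMemoryRequirementsInfo2 info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, &plane_info};
//     info.image = image;
//     vkGetImageMemoryRequirements2(device, &info, &reqs);  // reqs is a VkMemoryRequirements2
//
// which is why the tracker below keeps one *_memory_requirements_checked flag per plane.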
void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
    const VkImagePlaneMemoryRequirementsInfo *plane_info =
        (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
    IMAGE_STATE *image_state = GetImageState(image);
    if (image_state) {
        if (plane_info != nullptr) {
            // Multi-plane image
            image_state->memory_requirements_checked = false;  // Each image plane needs to be checked itself
            if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
                image_state->plane0_memory_requirements_checked = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
                image_state->plane1_memory_requirements_checked = true;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
                image_state->plane2_memory_requirements_checked = true;
            }
        } else {
            // Single-plane image
            image_state->memory_requirements_checked = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(image, nullptr);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}

static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
                                                        VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
    image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
    if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
        image_state->sparse_metadata_required = true;
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
    VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    auto image_state = GetImageState(image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!shaderModule) return;
    auto shader_module_state = GetShaderModuleState(shaderModule);
    shader_module_state->destroyed = true;
    shaderModuleMap.erase(shaderModule);
}

void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
                                                          const VkAllocationCallbacks *pAllocator) {
    if (!pipeline) return;
    PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
    const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
    pipeline_state->destroyed = true;
    pipelineMap.erase(pipeline);
}

void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!pipelineLayout) return;
    auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
    pipeline_layout_state->destroyed = true;
    pipelineLayoutMap.erase(pipelineLayout);
}

void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
                                                         const VkAllocationCallbacks *pAllocator) {
    if (!sampler) return;
    SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
    const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
    // Any bound cmd buffers are now invalid
    if (sampler_state) {
        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
        sampler_state->destroyed = true;  // Set inside the null check to avoid dereferencing a missing state entry
    }
    samplerMap.erase(sampler);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
                                                                     const VkAllocationCallbacks *pAllocator) {
    if (!descriptorSetLayout) return;
    auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
    if (layout_it != descriptorSetLayoutMap.end()) {
        layout_it->second.get()->destroyed = true;
        descriptorSetLayoutMap.erase(layout_it);
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!descriptorPool) return;
    DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
    const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
    if (desc_pool_state) {
        // Any bound cmd buffers are now invalid
        InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
        // Free sets that were in this pool
        for (auto ds : desc_pool_state->sets) {
            FreeDescriptorSet(ds);
        }
        desc_pool_state->destroyed = true;
        descriptorPoolMap.erase(descriptorPool);
    }
}

// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // Reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}

void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
                                                             uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
    auto pPool = GetCommandPoolState(commandPool);
    FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
}

void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
    cmd_pool_state->createFlags = pCreateInfo->flags;
    cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
    commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
}

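// Query pool creation records more than the create info for performance query pools: it notes which counter
// scopes (command buffer / render pass) are in use, and asks the driver, via
// DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, how many submission passes a single query needs,
// since VK_KHR_performance_query may require replaying a submission n_performance_passes times.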
void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
    query_pool_state->createInfo = *pCreateInfo;
    query_pool_state->pool = *pQueryPool;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];

        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
            const auto &counter = counters.counters[perf->pCounterIndices[i]];
            switch (counter.scope) {
                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
                    query_pool_state->has_perf_scope_command_buffer = true;
                    break;
                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
                    query_pool_state->has_perf_scope_render_pass = true;
                    break;
                default:
                    break;
            }
        }

        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
                                                                      &query_pool_state->n_performance_passes);
    }

    queryPoolMap[*pQueryPool] = std::move(query_pool_state);

    QueryObject query_obj{*pQueryPool, 0u};
    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
        query_obj.query = i;
        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
    }
}

void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!commandPool) return;
    COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
    // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
    // "When a pool is destroyed, all command buffers allocated from the pool are freed."
    if (cp_state) {
        // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
        std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
        FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
        cp_state->destroyed = true;
        commandPoolMap.erase(commandPool);
    }
}

void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
                                                            VkCommandPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    // Reset all of the CBs allocated from this pool
    auto command_pool_state = GetCommandPoolState(commandPool);
    for (auto cmdBuffer : command_pool_state->commandBuffers) {
        ResetCommandBufferState(cmdBuffer);
    }
}

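// vkResetFences: an internally scoped fence simply becomes unsignaled. A fence whose payload was temporarily
// imported (kSyncScopeExternalTemporary) drops back to internal scope on reset, matching the spec's
// temporary-import semantics for external fences.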
void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                       VkResult result) {
    for (uint32_t i = 0; i < fenceCount; ++i) {
        auto pFence = GetFenceState(pFences[i]);
        if (pFence) {
            if (pFence->scope == kSyncScopeInternal) {
                pFence->state = FENCE_UNSIGNALED;
            } else if (pFence->scope == kSyncScopeExternalTemporary) {
                pFence->scope = kSyncScopeInternal;
            }
        }
    }
}

// For given cb_nodes, invalidate them and track object causing invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
// can also unlink objects from command buffers.
void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // If secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        cb_nodes.clear();
    }
}

void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
                                                            const VulkanTypedHandle &obj) {
    for (auto cb_node : cb_nodes) {
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // If secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!framebuffer) return;
    FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
    const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
    InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
    framebuffer_state->destroyed = true;
    frameBufferMap.erase(framebuffer);
}

void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!renderPass) return;
    RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
    const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
    InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
    rp_state->destroyed = true;
    renderPassMap.erase(renderPass);
}

void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto fence_state = std::make_shared<FENCE_STATE>();
    fence_state->fence = *pFence;
    fence_state->createInfo = *pCreateInfo;
    fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
    fenceMap[*pFence] = std::move(fence_state);
}

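// Pipeline creation uses the chassis two-phase pattern: PreCallValidate* pre-builds PIPELINE_STATE objects in the
// pass-through *_api_state scratch area (GPU-assisted validation may swap in its own pCreateInfos), and the
// matching PostCallRecord* adopts state only for the pipelines the driver actually returned.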
bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                    const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                    void *cgpl_state_data) const {
    // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    cgpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    cgpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
        (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   VkResult result, void *cgpl_state_data) {
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
        }
    }
    cgpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkComputePipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   void *ccpl_state_data) const {
    auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
    ccpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    ccpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
        ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                  const VkComputePipelineCreateInfo *pCreateInfos,
                                                                  const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                  VkResult result, void *ccpl_state_data) {
    create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);

    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
        }
    }
    ccpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
                                                                        uint32_t count,
                                                                        const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache,
                                                                         uint32_t count,
                                                                         const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
                                                         const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;  // Don't create tracking state for a failed creation
    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
                                                                     const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkDescriptorSetLayout *pSetLayout, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
}

// For repeatable sorting, not very useful for "memory in range" search
struct PushConstantRangeCompare {
    bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
        if (lhs->offset == rhs->offset) {
            if (lhs->size == rhs->size) {
                // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
                return lhs->stageFlags < rhs->stageFlags;
            }
            // If the offsets are the same then sorting by the end of range is useful for validation
            return lhs->size < rhs->size;
        }
        return lhs->offset < rhs->offset;
    }
};

static PushConstantRangesDict push_constant_ranges_dict;

PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
    if (!info->pPushConstantRanges) {
        // Hand back the empty entry (creating as needed)...
        return push_constant_ranges_dict.look_up(PushConstantRanges());
    }

    // Sort the input ranges to ensure equivalent ranges map to the same id
    std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
    for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
        sorted.insert(info->pPushConstantRanges + i);
    }

    PushConstantRanges ranges;
    ranges.reserve(sorted.size());
    for (const auto range : sorted) {
        ranges.emplace_back(*range);
    }
    return push_constant_ranges_dict.look_up(std::move(ranges));
}

// Dictionary of canonical form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;

static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}

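// Pipeline layouts are reduced to canonical-form IDs: one for the push constant ranges, one for the ordered list
// of set layouts, and one "compatible for set N" record per set. Pipeline-layout compatibility checks at bind and
// draw time then become cheap ID comparisons instead of deep structural comparisons.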
void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkPipelineLayout *pPipelineLayout, VkResult result) {
    if (VK_SUCCESS != result) return;

    auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
    pipeline_layout_state->layout = *pPipelineLayout;
    pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
    PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
        set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
    }

    // Get canonical form IDs for the "compatible for set" contents
    pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
    auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
    pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);

    // Create table of "compatible for set N" canonical forms for trivial accept validation
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->compat_for_set.emplace_back(
            GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
    }
    pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkDescriptorPool *pDescriptorPool, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                               VkDescriptorPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
    // TODO: validate flags
    // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
    for (auto ds : pPool->sets) {
        FreeDescriptorSet(ds);
    }
    pPool->sets.clear();
    // Reset available count for each type and available sets for this pool
    for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
        pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
    }
    pPool->availableSets = pPool->maxSets;
}

bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
                                                                   const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                   VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
    // Always update common data
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);

    return false;
}

// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                  VkDescriptorSet *pDescriptorSets, VkResult result,
                                                                  void *ads_state_data) {
    if (VK_SUCCESS != result) return;
    // All the updates are contained in a single cvdescriptorset function
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
}

void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
                                                             const VkDescriptorSet *pDescriptorSets) {
    DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
    // Update available descriptor sets in pool
    pool_state->availableSets += count;

    // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
    for (uint32_t i = 0; i < count; ++i) {
        if (pDescriptorSets[i] != VK_NULL_HANDLE) {
            auto descriptor_set = setMap[pDescriptorSets[i]].get();
            uint32_t type_index = 0, descriptor_count = 0;
            for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
                type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
                descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
                pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
            }
            FreeDescriptorSet(descriptor_set);
            pool_state->sets.erase(descriptor_set);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}

void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
                                                                  VkCommandBuffer *pCommandBuffer, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
    if (pPool) {
        for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
            // Add command buffer to its commandPool map
            pPool->commandBuffers.insert(pCommandBuffer[i]);
            auto pCB = std::make_shared<CMD_BUFFER_STATE>();
            pCB->createInfo = *pCreateInfo;
            pCB->device = device;
            pCB->command_pool = pPool;
            // Add command buffer to map
            commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
            ResetCommandBufferState(pCommandBuffer[i]);
        }
    }
}

// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
    AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
                            cb_state);
    // If imageless fb, skip fb binding
    if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
    const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
    for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
        auto view_state = GetAttachmentImageViewState(fb_state, attachment);
        if (view_state) {
            AddCommandBufferBindingImageView(cb_state, view_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary Command Buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (pInfo) {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, framebuffer);
                }
            }
        }
    }
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo) {
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command buffer inheriting render pass state, update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
            cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
            cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
        }
    }

    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    cb_state->performance_lock_acquired = performance_lock_acquired;
}

void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    // Cached validation is specific to a single recording of a single command buffer.
    for (auto descriptor_set : cb_state->validated_descriptor_sets) {
        descriptor_set->ClearCachedValidation(cb_state);
    }
    cb_state->validated_descriptor_sets.clear();
    if (VK_SUCCESS == result) {
        cb_state->state = CB_RECORDED;
    }
}

void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
                                                              VkResult result) {
    if (VK_SUCCESS == result) {
        ResetCommandBufferState(commandBuffer);
    }
}

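// Compute the command-buffer status bits that a pipeline bakes in statically: start from "all state set" and
// clear the bit for each state the pipeline lists as dynamic. CmdBindPipeline ORs the result into the command
// buffer's status, so only genuinely dynamic state still requires a vkCmdSet* call before drawing.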
CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
    // Initially assume everything is static state
    CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;

    if (ds) {
        for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
            switch (ds->pDynamicStates[i]) {
                case VK_DYNAMIC_STATE_LINE_WIDTH:
                    flags &= ~CBSTATUS_LINE_WIDTH_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BIAS:
                    flags &= ~CBSTATUS_DEPTH_BIAS_SET;
                    break;
                case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
                    flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
                    flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
                    flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
                    flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
                    flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
                    break;
                case VK_DYNAMIC_STATE_SCISSOR:
                    flags &= ~CBSTATUS_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT:
                    flags &= ~CBSTATUS_VIEWPORT_SET;
                    break;
                case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
                    flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
                    flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
                    break;
                case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
                    flags &= ~CBSTATUS_LINE_STIPPLE_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
                    flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
                    break;
                default:
                    break;
            }
        }
    }

    return flags;
}

// Validation cache:
// CV is the bottommost implementor of this extension. Don't pass calls down.

// Utility function to set collective state for pipeline:
// if any attachment enables blending with a constant color/alpha blend factor, record that the pipeline depends
// on blend constants (blendConstantsEnabled).
void SetPipelineState(PIPELINE_STATE *pPipe) {
    if (pPipe->graphicsPipelineCI.pColorBlendState) {
        for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
            if (VK_TRUE == pPipe->attachments[i].blendEnable) {
                if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
                    pPipe->blendConstantsEnabled = true;
                }
            }
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
        cb_state->status &= ~cb_state->static_status;
        cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
        cb_state->status |= cb_state->static_status;
    }
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
    cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
    SetPipelineState(pipe_state);
    AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
}

void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                         uint32_t viewportCount, const VkViewport *pViewports) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_VIEWPORT_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
                                                                   uint32_t exclusiveScissorCount,
                                                                   const VkRect2D *pExclusiveScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
    // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
    cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
                                                                    VkImageLayout imageLayout) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    if (imageView != VK_NULL_HANDLE) {
        auto view_state = GetImageViewState(imageView);
        AddCommandBufferBindingImageView(cb_state, view_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                             uint32_t viewportCount,
                                                                             const VkShadingRatePaletteNV *pShadingRatePalettes) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
    // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
}

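// Acceleration structure creation eagerly queries all three memory requirement types (object, build scratch,
// update scratch) and caches them on the state object, so bind- and build-time validation never has to call back
// into the driver.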
3141void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3142 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3143 const VkAllocationCallbacks *pAllocator,
3144 VkAccelerationStructureNV *pAccelerationStructure,
3145 VkResult result) {
3146 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003147 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003148
3149 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
3150 VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
3151 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3152 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
3153 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3154 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3155
3156 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
3157 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3158 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
3159 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3160 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3161 &as_state->build_scratch_memory_requirements);
3162
3163 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
3164 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3165 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
3166 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3167 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3168 &as_state->update_scratch_memory_requirements);
3169
3170 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3171}
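
// The three queries above mirror what an application would itself do before binding and building; a minimal
// app-side sketch (hypothetical 'as' handle) for reference:
//     VkAccelerationStructureMemoryRequirementsInfoNV info = {};
//     info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
//     info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
//     info.accelerationStructure = as;
//     VkMemoryRequirements2KHR reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
//     vkGetAccelerationStructureMemoryRequirementsNV(device, &info, &reqs);
// Caching all three requirement types (object, build scratch, update scratch) at create time avoids an
// extra dispatch at bind/validation time if the app never queries them.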

void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
                                                                          const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
                                                                          const VkAllocationCallbacks *pAllocator,
                                                                          VkAccelerationStructureKHR *pAccelerationStructure,
                                                                          VkResult result) {
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoKHR as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR;
    as_memory_requirements_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &as_memory_requirements_info, &as_state->memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoKHR scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR;
    scratch_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &scratch_memory_req_info,
                                                          &as_state->build_scratch_memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoKHR update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR;
    update_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &update_memory_req_info,
                                                          &as_state->update_scratch_memory_requirements);

    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}

void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
    VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
    ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
    if (as_state != nullptr) {
        if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
            as_state->memory_requirements = *pMemoryRequirements;
            as_state->memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
            as_state->build_scratch_memory_requirements = *pMemoryRequirements;
            as_state->build_scratch_memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
            as_state->update_scratch_memory_requirements = *pMemoryRequirements;
            as_state->update_scratch_memory_requirements_checked = true;
        }
    }
}
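
// The *_checked flags distinguish requirements the application actually queried from the values this layer
// pre-fetched at create time, so later checks can tell whether the app bound memory without querying first.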

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryCommon(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result,
    bool isNV) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        const VkBindAccelerationStructureMemoryInfoKHR &info = pBindInfos[i];

        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
        if (as_state) {
            // Track bound memory range information
            auto mem_info = GetDevMemState(info.memory);
            if (mem_info) {
                InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
            }
            // Track objects tied to memory
            SetMemBinding(info.memory, as_state, info.memoryOffset,
                          VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR));

            // GPU validation of top level acceleration structure building needs acceleration structure handles.
            // XXX TODO: Query device address for KHR extension
            if (enabled.gpu_validation && isNV) {
                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
    PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, true);
}

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryKHR(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result) {
    PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, false);
}
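
// Both entry points funnel through the Common helper above; the isNV flag exists because the 8-byte opaque
// handle (sizeof(uint64_t)) consumed by GPU-assisted validation is only queryable through the NV entry point.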

void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
    VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
    VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state == nullptr) {
        return;
    }

    ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
    ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
    if (dst_as_state != nullptr) {
        dst_as_state->built = true;
        dst_as_state->build_info.initialize(pInfo);
        AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
    }
    if (src_as_state != nullptr) {
        AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
    }
    cb_state->hasBuildAccelerationStructureCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
                                                                          VkAccelerationStructureNV dst,
                                                                          VkAccelerationStructureNV src,
                                                                          VkCopyAccelerationStructureModeNV mode) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info = src_as_state->build_info;
            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
                                                                          VkAccelerationStructureKHR accelerationStructure,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!accelerationStructure) return;
    auto *as_state = GetAccelerationStructureState(accelerationStructure);
    if (as_state) {
        const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
        InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
        for (auto mem_binding : as_state->GetBoundMemory()) {
            RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_binding);
        }
        ClearMemoryObjectBindings(obj_struct);
        as_state->destroyed = true;
        accelerationStructureMap.erase(accelerationStructure);
    }
}

void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
                                                                         VkAccelerationStructureNV accelerationStructure,
                                                                         const VkAllocationCallbacks *pAllocator) {
    PreCallRecordDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator);
}

void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                   uint32_t viewportCount,
                                                                   const VkViewportWScalingNV *pViewportWScalings) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
                                                               uint16_t lineStipplePattern) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
                                                          float depthBiasClamp, float depthBiasSlopeFactor) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
                                                        const VkRect2D *pScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
    cb_state->status |= CBSTATUS_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
                                                            float maxDepthBounds) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                   uint32_t compareMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t writeMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t reference) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
}
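
// Each vkCmdSet* recorder above simply marks the matching CBSTATUS_* bit. Together with the static_status
// computed at pipeline bind, this is intended to let draw-time validation detect dynamic state that was
// declared in pDynamicState but never actually set on the command buffer.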

// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" sets before and after the range being set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
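
// Worked example of the "disturbed" logic above (illustrative): suppose sets 0..3 are bound, then set 1 is
// re-bound using a layout whose compatibility id for set 1 differs from what was recorded. Sets 2..3 are
// disturbed and shrunk away, set 1 takes the new binding, and set 0 survives only if its compat id still
// matches the new layout's; otherwise it is nulled out and must be re-bound before use.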

// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                                                                VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                uint32_t firstSet, uint32_t setCount,
                                                                const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                                const uint32_t *pDynamicOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pipeline_layout = GetPipelineLayout(layout);

    // Resize binding arrays
    uint32_t last_set_index = firstSet + setCount - 1;
    if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
        cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
                                  dynamicOffsetCount, pDynamicOffsets);
    cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
}

void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set, clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
                                                                  VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                  uint32_t set, uint32_t descriptorWriteCount,
                                                                  const VkWriteDescriptorSet *pDescriptorWrites) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                                                            VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                                                            const void *pValues) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state != nullptr) {
        ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);

        auto &push_constant_data = cb_state->push_constant_data;
        assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
        std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
    }
}
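
// The shadow copy above mirrors vkCmdPushConstants byte-for-byte: for example, offset = 16 and size = 8
// overwrite only bytes [16, 24) of push_constant_data. The backing vector is assumed to have been sized
// from the layout's push constant ranges by ResetCommandBufferPushConstantDataIfIncompatible, which the
// assert relies on.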

void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                             VkIndexType indexType) {
    auto buffer_state = GetBufferState(buffer);
    auto cb_state = GetCBState(commandBuffer);

    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->index_buffer_binding.buffer = buffer;
    cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
    cb_state->index_buffer_binding.offset = offset;
    cb_state->index_buffer_binding.index_type = indexType;
    // Add binding for this index buffer to this command buffer
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                               uint32_t bindingCount, const VkBuffer *pBuffers,
                                                               const VkDeviceSize *pOffsets) {
    auto cb_state = GetCBState(commandBuffer);

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer = pBuffers[i];
        vertex_buffer_binding.offset = pOffsets[i];
        // Add binding for this vertex buffer to this command buffer
        AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
    }
}

void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
                                                           VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
}

bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
                                               EventToStageMap *localEventToStageMap) {
    (*localEventToStageMap)[event] = stageMask;
    return false;
}

void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}
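
// Note: the lambda queued on eventUpdates is not run at record time. It is replayed when the command buffer
// is submitted, against a queue-local event-to-stage map, so event state resolves in submission order rather
// than in the order command buffers happened to be recorded.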

void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }

    cb_state->eventUpdates.emplace_back(
        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
        });
}

void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                        uint32_t bufferMemoryBarrierCount,
                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                        uint32_t imageMemoryBarrierCount,
                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    for (uint32_t i = 0; i < eventCount; ++i) {
        auto event_state = GetEventState(pEvents[i]);
        if (event_state) {
            AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
                                    cb_state);
        }
        cb_state->waitedEvents.insert(pEvents[i]);
        cb_state->events.push_back(pEvents[i]);
    }
}

bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
    (*localQueryToStateMap)[object] = value;
    return false;
}

bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
                                                QueryState value, QueryMap *localQueryToStateMap) {
    for (uint32_t i = 0; i < queryCount; i++) {
        QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
        (*localQueryToStateMap)[object] = value;
    }
    return false;
}

QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
                                                 uint32_t perfPass) const {
    QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);

    auto iter = localQueryToStateMap->find(query);
    if (iter != localQueryToStateMap->end()) return iter->second;

    return QUERYSTATE_UNKNOWN;
}
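
// QueryObject is keyed by (pool, index) plus a perfPass component so that VK_KHR_performance_query counters,
// which require the same commands to be replayed once per counter pass, are tracked independently for each
// pass; anything not found in the map deliberately reports QUERYSTATE_UNKNOWN rather than an error.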

void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.insert(query_obj);
    cb_state->startedQueries.insert(query_obj);
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
        return false;
    });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
                                                         VkFlags flags) {
    if (disabled.query_validation) return;
    QueryObject query = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query);
}

void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.erase(query_obj);
    cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                                    VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                    QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    QueryObject query_obj = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
        QueryObject query = {queryPool, slot};
        cb_state->resetQueries.insert(query);
    }

    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, VkQueryPool &firstPerfQueryPool,
                                                                            uint32_t perfQueryPass,
                                                                            QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
                                                                   VkDeviceSize dstOffset, VkDeviceSize stride,
                                                                   VkQueryResultFlags flags) {
    if (disabled.query_validation) return;
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buff_state = GetBufferState(dstBuffer);
    AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
                                                VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
}

void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            VkImageView view = pCreateInfo->pAttachments[i];
            auto view_state = GetImageViewState(view);
            if (!view_state) {
                continue;
            }
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}

void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
    }
}
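
// Illustrative example of the DAG built above: a dependency with srcSubpass = 0 and dstSubpass = 1 yields
// subpass_to_node[1].prev == {0} and subpass_to_node[0].next == {1}; a dependency with srcSubpass ==
// dstSubpass is instead recorded by dependency index in self_dependencies, and anything involving
// VK_SUBPASS_EXTERNAL is skipped because it does not create an edge between subpasses.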

static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
    if (index == VK_ATTACHMENT_UNUSED) return;

    if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
}

void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
                                                         VkRenderPass *pRenderPass) {
    render_pass->renderPass = *pRenderPass;
    auto create_info = render_pass->createInfo.ptr();

    RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());

    for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);

            // resolve attachments are considered to be written
            if (subpass.pResolveAttachments) {
                MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
            }
        }
        if (subpass.pDepthStencilAttachment) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
        }
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
        }
    }

    // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
    renderPassMap[*pRenderPass] = std::move(render_pass);
}

// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in the Google style guide, but intentionally forces caller to
// move or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
// construction or assignment.
void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
}

void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                     const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                     VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                             VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer);
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(render_pass_state->cb_bindings,
                                VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
                                cb_state);

        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeSubpass++;
    cb_state->activeSubpassContents = contents;
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                             const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
                // from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}
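
// In short, executing a secondary command buffer folds its deferred state into the primary: image layout
// transitions are merged via UpdateFrom, and the secondary's queued query and submit-time lambdas are
// appended so they replay as if they had been recorded directly into the primary.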

void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}

void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range = MemRange();
        mem_info->p_driver_data = nullptr;
    }
}

void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
            AddAliasingImage(image_state);
        }
    }
}
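
// Two distinct binding models are handled above: a VkBindImageMemorySwapchainInfoKHR in the pNext chain
// binds the image to a swapchain image (no VkDeviceMemory involved), while the plain path records a memory
// range and a memory binding. Either path may create aliases, hence the AddAliasingImage call for both
// VK_IMAGE_CREATE_ALIAS_BIT images and swapchain-bound images.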

void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
                                                           VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkBindImageMemoryInfo bindInfo = {};
    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindInfo.image = image;
    bindInfo.memory = mem;
    bindInfo.memoryOffset = memoryOffset;
    UpdateBindImageMemoryState(bindInfo);
}

void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
    auto event_state = GetEventState(event);
    if (event_state) {
        event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
    }
}

void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
        semaphore_state->scope = kSyncScopeExternalPermanent;
    }
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif
4129
4130void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
4131 VkResult result) {
4132 if (VK_SUCCESS != result) return;
4133 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
4134}
4135
4136void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
4137 VkFenceImportFlagsKHR flags) {
4138 FENCE_STATE *fence_node = GetFenceState(fence);
4139 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
4140 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
4141 fence_node->scope == kSyncScopeInternal) {
4142 fence_node->scope = kSyncScopeExternalTemporary;
4143 } else {
4144 fence_node->scope = kSyncScopeExternalPermanent;
4145 }
4146 }
4147}

void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
    FENCE_STATE *fence_state = GetFenceState(fence);
    if (fence_state) {
        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
            // Export with reference transference becomes external
            fence_state->scope = kSyncScopeExternalPermanent;
        } else if (fence_state->scope == kSyncScopeInternal) {
            // Export with copy transference has a side effect of resetting the fence
            fence_state->state = FENCE_UNSIGNALED;
        }
    }
}
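
// Illustrative sketch (not part of the layer): exporting a Sync FD payload from a fence, the
// copy-transference case above that leaves scope internal but resets the tracked fence state.
//
//     VkFenceGetFdInfoKHR get_fd = {VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR};
//     get_fd.fence = app_fence;
//     get_fd.handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
//     int fd = -1;
//     vkGetFenceFdKHR(device, &get_fd, &fd);  // tracker records FENCE_UNSIGNALED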

void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}

void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
    if (VK_SUCCESS != result) return;
    eventMap[*pEvent].write_in_use = 0;
    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}

void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
    return;
}
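
// Illustrative sketch (not part of the layer): a swapchain recreation that exercises the
// oldSwapchain retirement rule above even when creation fails. Names are hypothetical.
//
//     VkSwapchainCreateInfoKHR ci = last_create_info;  // previous creation parameters
//     ci.imageExtent = new_extent;
//     ci.oldSwapchain = current_swapchain;
//     VkSwapchainKHR new_swapchain = VK_NULL_HANDLE;
//     VkResult res = vkCreateSwapchainKHR(device, &ci, nullptr, &new_swapchain);
//     // Whether or not res == VK_SUCCESS, current_swapchain is now retired in the tracker.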

void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}

void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}

void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed, but if the app does that, it's
        // confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP
    // (and its semaphore waits) /never/ participate in any completion proof.
}
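
// Illustrative sketch (not part of the layer): a present whose per-swapchain results feed the
// local_result logic above. Variable names are hypothetical.
//
//     VkResult per_swapchain[2] = {};
//     VkPresentInfoKHR present = {VK_STRUCTURE_TYPE_PRESENT_INFO_KHR};
//     present.swapchainCount = 2;
//     present.pSwapchains = swapchains;
//     present.pImageIndices = image_indices;
//     present.pResults = per_swapchain;  // lets the tracker judge each present individually
//     vkQueuePresentKHR(queue, &present);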

void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
    if (pCreateInfos) {
        for (uint32_t i = 0; i < swapchainCount; i++) {
            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
        }
    }
}

void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}
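
// Illustrative sketch (not part of the layer): a typical acquire that drives the state above;
// the tracker treats the fence as inflight and the semaphore as signaled immediately.
//
//     uint32_t image_index = 0;
//     vkAcquireNextImageKHR(device, app_swapchain, UINT64_MAX, acquire_semaphore, acquire_fence, &image_index);
//     // image_index's IMAGE_STATE is now marked acquired until it is presented again.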

void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
    if ((NULL != pPhysicalDevices) && (result == VK_SUCCESS || result == VK_INCOMPLETE)) {
        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
            phys_device_state.phys_device = pPhysicalDevices[i];
            // Init actual features for each physical device
            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
        }
    }
}

// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}
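
// Illustrative sketch (not part of the layer): the two-call query idiom that the QUERY_COUNT /
// QUERY_DETAILS transitions above model. Names are hypothetical.
//
//     uint32_t count = 0;
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);       // recorded as QUERY_COUNT
//     std::vector<VkQueueFamilyProperties> props(count);
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, props.data());  // recorded as QUERY_DETAILS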

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!surface) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->destroyed = true;
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_METAL_EXT
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_METAL_EXT

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
                                                                    const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
                                                                    const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                    VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    physical_device_state->features2.pNext = nullptr;
    physical_device_state->features2.features = *pFeatures;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
    if (*pSurfaceFormatCount) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
        }
        if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
            physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
                                                                                              uint32_t queueFamilyIndex,
                                                                                              uint32_t *pCounterCount,
                                                                                              VkPerformanceCounterKHR *pCounters) {
    if (NULL == pCounters) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);

    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
    queueFamilyCounters->counters.resize(*pCounterCount);
    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];

    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}

void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}

void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}
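
// Illustrative sketch (not part of the layer): a template update of the kind decoded above.
// The struct AppUniformRefs and all handle names are hypothetical.
//
//     struct AppUniformRefs { VkDescriptorBufferInfo buffer_info; };
//     VkDescriptorUpdateTemplateEntry entry = {};
//     entry.dstBinding = 0;
//     entry.descriptorCount = 1;
//     entry.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//     entry.offset = offsetof(AppUniformRefs, buffer_info);
//     entry.stride = sizeof(AppUniformRefs);
//     // ... create app_template with this entry, then:
//     AppUniformRefs refs = {{app_buffer, 0, VK_WHOLE_SIZE}};
//     vkUpdateDescriptorSetWithTemplate(device, app_set, app_template, &refs);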

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
    }

    const VkFormat conversion_format = create_info->format;

    if (conversion_format != VK_FORMAT_UNDEFINED) {
        // If format is VK_FORMAT_UNDEFINED, will be set by external AHB features
        ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
    }

    ycbcr_state->chromaFilter = create_info->chromaFilter;
    ycbcr_state->format = conversion_format;
    samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
    }

    auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
    ycbcr_state->destroyed = true;
    samplerYcbcrConversionMap.erase(ycbcr_conversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
}

void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_obj.perf_pass = passIndex;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}
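
// Illustrative sketch (not part of the layer): a host-side reset recorded by the loop above,
// assuming hostQueryReset was enabled at device creation. app_query_pool is hypothetical.
//
//     vkResetQueryPool(device, app_query_pool, /*firstQuery=*/0, /*queryCount=*/4);
//     // Queries 0..3 (and, for performance pools, every pass of each query) become QUERYSTATE_RESET.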

void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during the ValidateAllocateDescriptorSets() call
    }
}

// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from the pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create a tracking object for each descriptor set; insert into the global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
                                                                       variable_count, this);
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}

// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateDrawState(cb_state, bind_point);
    cb_state->hasDispatchCmd = true;
}

// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateStateCmdDrawDispatchType(cb_state, bind_point);
    cb_state->hasDrawCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                                   uint32_t firstVertex, uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                                          uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                           uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                        VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                        uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                      VkDeviceSize offset, VkBuffer countBuffer,
                                                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                      uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}

void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}

void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the SPIR-V isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline
    for (auto use : stage_state->descriptor_uses) {
        // While validating shaders, capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
        auto &reqs = pipeline->active_slots[slot][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
    }
}
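
// Illustrative sketch (not part of the layer): how a shader interface maps into active_slots
// above. The GLSL declaration below is a hypothetical input, not code from this repository.
//
//     // layout(set = 0, binding = 2) uniform sampler2D u_tex;   (reachable from the entrypoint)
//     // -> descriptor_uses gains a key {set=0, binding=2}, and the loop ORs the corresponding
//     //    descriptor_req bits into pipeline->active_slots[0][2] and raises max_active_slot to 0.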

void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}
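
// Illustrative sketch (not part of the layer): the sizing rule above on hypothetical ranges.
// Two ranges {offset=0, size=16} and {offset=16, size=48} yield
// size_needed = max(0 + 16, 16 + 48) = 64, so push_constant_data becomes 64 zeroed bytes.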

void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;  // to be set later
            image_ci.flags = 0;        // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
        }
    }

    if (*pSwapchainImageCount) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
        }
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}
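
// Illustrative sketch (not part of the layer): the two-call retrieval this hook records;
// image-state objects are synthesized from the swapchain's createInfo as shown above.
//
//     uint32_t count = 0;
//     vkGetSwapchainImagesKHR(device, app_swapchain, &count, nullptr);         // QUERY_COUNT
//     std::vector<VkImage> images(count);
//     vkGetSwapchainImagesKHR(device, app_swapchain, &count, images.data());   // QUERY_DETAILS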