/* Copyright (c) 2015-2020 The Khronos Group Inc.
 * Copyright (c) 2015-2020 Valve Corporation
 * Copyright (c) 2015-2020 LunarG, Inc.
 * Copyright (C) 2015-2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

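// Illustrative sketch (not part of the layer): the kind of application-side pNext chain that drives
// the recording paths below. The structure types are core Vulkan / VK_ANDROID_external_memory_android_hardware_buffer;
// the externalFormat value shown is hypothetical.
//
//     VkExternalFormatANDROID ext_fmt{VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, nullptr, /*externalFormat*/ 0x1234};
//     VkExternalMemoryImageCreateInfo emici{VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, &ext_fmt,
//                                           VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID};
//     VkImageCreateInfo ci{VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, &emici /*, ...remaining image parameters... */};
//
// RecordCreateImageANDROID() walks that chain with lvl_find_in_chain<> and marks the IMAGE_STATE
// (imported_ahb / has_ahb_format) so later validation can special-case AHB-backed images.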
void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->imported_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
    }
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage,
                                                       VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(*pImage, pCreateInfo);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    bool pre_fetch_memory_reqs = true;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    if (is_node->external_format_android) {
        // Do not fetch requirements for external memory images
        pre_fetch_memory_reqs = false;
    }
#endif
    // Record the memory requirements in case they won't be queried
    if (pre_fetch_memory_reqs) {
        DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
    }
    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveImageMemoryRange(image, mem_info);
        }
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage,
                                                       VkImageLayout dstImageLayout, uint32_t regionCount,
                                                       const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage,
                                                       VkImageLayout dstImageLayout, uint32_t regionCount,
                                                       const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    // Fetch the memory requirements up front, in case the app never queries them itself
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    imageViewMap[*pView] = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveBufferMemoryRange(buffer, mem_info);
        }
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image viewstate for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image viewstate for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

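// Aliasing bookkeeping below is symmetric: when image A is found to alias image B, A is added to
// B's aliasing_images set and vice versa. A minimal sketch of the resulting invariant, with
// hypothetical handles a and b bound to the same memory or swapchain slot:
//
//     // After AddAliasingImage(a_state):
//     //   a_state->aliasing_images == {b};  b_state->aliasing_images == {a}
//
// RemoveAliasingImage() then only has to erase one direction per peer before clearing its own set.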
void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    if (!(image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT)) return;
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        auto mem_state = GetDevMemState(image_state->binding.mem);
        if (mem_state) {
            bound_images = &mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // This is a one-way clear: the aliasing references are symmetric and every aliased image is in
    // bound_images, so clearing each image's set in this single loop removes all cross references.
    // No two-way erase is needed.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}
375
Jeff Bolz310775c2019-10-09 00:46:33 -0500376const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
377 auto it = eventMap.find(event);
378 if (it == eventMap.end()) {
379 return nullptr;
380 }
381 return &it->second;
382}
383
locke-lunargd556cc32019-09-17 01:21:23 -0600384EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
385 auto it = eventMap.find(event);
386 if (it == eventMap.end()) {
387 return nullptr;
388 }
389 return &it->second;
390}
391
392const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
393 auto it = queueMap.find(queue);
394 if (it == queueMap.cend()) {
395 return nullptr;
396 }
397 return &it->second;
398}
399
400QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
401 auto it = queueMap.find(queue);
402 if (it == queueMap.end()) {
403 return nullptr;
404 }
405 return &it->second;
406}
407
408const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
409 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
410 auto it = phys_dev_map->find(phys);
411 if (it == phys_dev_map->end()) {
412 return nullptr;
413 }
414 return &it->second;
415}
416
417PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
418 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
419 auto it = phys_dev_map->find(phys);
420 if (it == phys_dev_map->end()) {
421 return nullptr;
422 }
423 return &it->second;
424}
425
426PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
427const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
428
429// Return ptr to memory binding for given handle of specified type
430template <typename State, typename Result>
431static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
432 switch (typed_handle.type) {
433 case kVulkanObjectTypeImage:
434 return state->GetImageState(typed_handle.Cast<VkImage>());
435 case kVulkanObjectTypeBuffer:
436 return state->GetBufferState(typed_handle.Cast<VkBuffer>());
437 case kVulkanObjectTypeAccelerationStructureNV:
438 return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
439 default:
440 break;
441 }
442 return nullptr;
443}
444
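// The template above exists to avoid duplicating the handle-type dispatch for the const and
// non-const lookup paths; the two thin wrappers below simply pin down <State, Result> as
// (const tracker, const BINDABLE *) and (tracker, BINDABLE *) respectively.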
const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}
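
// Illustrative sketch (not part of the layer): a dedicated-allocation chain of the kind the code
// above records. The handle and sizes are hypothetical.
//
//     VkMemoryDedicatedAllocateInfoKHR dedicated{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, nullptr,
//                                                /*image*/ my_image, /*buffer*/ VK_NULL_HANDLE};
//     VkMemoryAllocateInfo alloc_info{VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, &dedicated,
//                                     /*allocationSize*/ reqs.size, /*memoryTypeIndex*/ type_index};
//
// AddMemObjInfo() stores is_dedicated plus the dedicated image/buffer handles so bind-time checks
// can verify that the memory is bound only to that resource.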

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
                if (pMemInfo) {
                    // Now update CBInfo's Mem reference list
                    AddCommandBufferBinding(pMemInfo->cb_bindings,
                                            VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
                }
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(pMemInfo->cb_bindings,
                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
            }
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(pMemInfo->cb_bindings,
                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
            }
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);
    mem_binding->binding.mem = mem;
    mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
    mem_binding->binding.offset = memory_offset;
    mem_binding->binding.size = mem_binding->requirements.size;

    if (mem != VK_NULL_HANDLE) {
        DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
        if (mem_info) {
            mem_info->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
        }
    }
}

// For the VK_NULL_HANDLE mem case, clear any previous binding; otherwise:
//  - Make sure the given object is in its object map
//  - If a previous binding existed, update the binding
//  - Add a reference from the objectInfo to the memoryInfo
//  - Add a reference off of the object's binding info
// Returns a "skip" value (currently always false; invalid handles are reported by the object tracker).
bool ValidationStateTracker::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
    bool skip = false;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (binding.mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
            if (mem_info) {
                mem_info->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}

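// Rough shape of the caching scheme in UpdateDrawState(), for orientation (descriptive only):
//  1. For each descriptor set the pipeline actually uses, prefilter the binding requirements.
//  2. Treat the set as "changed" if it is not in many-descriptor mode, if the bound set object or
//     its change count differs from the last validated state, or (when image layout validation is
//     enabled) if the command buffer's layout change count has moved.
//  3. Re-record bindings only when the set changed or the new requirement map is not a subset of
//     the one already validated; in many-descriptor mode, record just the delta computed with
//     std::set_difference.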
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            if (!descriptor_set->IsPushDescriptor()) {
                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
                // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

                if (reduced_map.IsManyDescriptors()) {
                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
                }

                // We can skip updating the state if "nothing" has changed since the last validation.
                // See CoreChecks::ValidateCmdBufDrawState for more details.
                bool descriptor_set_changed =
                    !reduced_map.IsManyDescriptors() ||
                    // Update if descriptor set (or contents) has changed
                    state.per_set[setIndex].validated_set != descriptor_set ||
                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                    (!disabled.image_layout_validation &&
                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
                bool need_update = descriptor_set_changed ||
                                   // Update if previous bindingReqMap doesn't include new bindingReqMap
                                   !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                                  state.per_set[setIndex].validated_set_binding_req_map.end(),
                                                  binding_req_map.begin(), binding_req_map.end());

                if (need_update) {
                    // Bind this set and its active descriptor resources to the command buffer
                    if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                        // Only record the bindings that haven't already been recorded
                        BindingReqMap delta_reqs;
                        std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                            state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                            state.per_set[setIndex].validated_set_binding_req_map.end(),
                                            std::inserter(delta_reqs, delta_reqs.begin()));
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, delta_reqs);
                    } else {
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, binding_req_map);
                    }

                    state.per_set[setIndex].validated_set = descriptor_set;
                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                    if (reduced_map.IsManyDescriptors()) {
                        // Check whether old == new before assigning, the equality check is much cheaper than
                        // freeing and reallocating the map.
                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                        }
                    } else {
                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                    }
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pools' sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
//  Add object_binding to cmd buffer
//  Add cb_binding to object
bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return false;
    }
    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
    auto inserted = cb_bindings.insert({cb_node, -1});
    if (inserted.second) {
        cb_node->object_bindings.push_back(obj);
        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
        return true;
    }
    return false;
}

// For a given object, if cb_node is in that objects cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        memset(&pCB->activeRenderPassBeginInfo, 0, sizeof(pCB->activeRenderPassBeginInfo));
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

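// The device-create hook below folds the enabled features into a single snapshot. Promoted
// extension feature structs are mapped onto the core11/core12 blocks, so later checks read one
// place. A minimal sketch of the precedence this code implements:
//
//     if (VkPhysicalDeviceVulkan12Features is chained)   -> copy it wholesale into core12
//     else (individual promoted structs may be chained)  -> copy each present struct's fields into core12
//
// The same pattern repeats for VkPhysicalDeviceVulkan11Features / core11.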
947void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
948 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
949 VkResult result) {
950 if (VK_SUCCESS != result) return;
951
952 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
953 if (nullptr == enabled_features_found) {
954 const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
955 if (features2) {
956 enabled_features_found = &(features2->features);
957 }
958 }
959
960 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
961 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
962 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
963
964 if (nullptr == enabled_features_found) {
965 state_tracker->enabled_features.core = {};
966 } else {
967 state_tracker->enabled_features.core = *enabled_features_found;
968 }
969
970 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
971 // previously set them through an explicit API call.
972 uint32_t count;
973 auto pd_state = GetPhysicalDeviceState(gpu);
974 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
975 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
976 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
977 // Save local link to this device's physical device state
978 state_tracker->physical_device_state = pd_state;
979
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700980 const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
981 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700982 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700983 } else {
984 // These structs are only allowed in pNext chain if there is no kPhysicalDeviceVulkan12Features
985
986 const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
987 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700988 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
989 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
990 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
991 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700992 }
993
994 const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
995 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -0700996 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
997 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -0700998 }
999
1000 const auto *descriptor_indexing_features =
1001 lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
1002 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001003 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1004 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1005 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1006 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1007 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1008 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1009 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1010 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1011 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1012 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1013 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1014 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1015 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1016 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1017 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1018 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1019 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1020 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1021 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1022 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1023 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1024 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1025 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1026 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1027 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1028 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1029 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1030 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
1031 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
1032 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1033 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1034 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1035 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1036 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1037 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1038 descriptor_indexing_features->descriptorBindingPartiallyBound;
1039 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1040 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1041 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001042 }
1043
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001044 const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001045 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001046 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001047 }
1048
1049 const auto *imageless_framebuffer_features =
1050 lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
1051 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001052 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001053 }
1054
1055 const auto *uniform_buffer_standard_layout_features =
1056 lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
1057 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001058 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1059 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001060 }
1061
1062 const auto *subgroup_extended_types_features =
1063 lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
1064 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001065 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1066 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001067 }
1068
1069 const auto *separate_depth_stencil_layouts_features =
1070 lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
1071 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001072 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1073 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001074 }
1075
1076 const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
1077 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001078 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001079 }
1080
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001081 const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001082 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001083 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001084 }
1085
1086 const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
1087 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001088 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1089 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1090 buffer_device_address->bufferDeviceAddressCaptureReplay;
1091 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1092 buffer_device_address->bufferDeviceAddressMultiDevice;
1093 }
1094 }
1095
    const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
    if (vulkan_11_features) {
        state_tracker->enabled_features.core11 = *vulkan_11_features;
    } else {
        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan11Features

        const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
        if (sixteen_bit_storage_features) {
            state_tracker->enabled_features.core11.storageBuffer16BitAccess =
                sixteen_bit_storage_features->storageBuffer16BitAccess;
            state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
                sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
            state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
            state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
        }

        const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
        if (multiview_features) {
            state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
            state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
            state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
        }

        const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
        if (variable_pointers_features) {
            state_tracker->enabled_features.core11.variablePointersStorageBuffer =
                variable_pointers_features->variablePointersStorageBuffer;
            state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
        }

        const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
        if (protected_memory_features) {
            state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
        }

        const auto *ycbcr_conversion_features =
            lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
        if (ycbcr_conversion_features) {
            state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
        }

        const auto *shader_draw_parameters_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
        if (shader_draw_parameters_features) {
            state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
        }
    }
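    // Note on precedence: VkPhysicalDeviceVulkan11Features aggregates the individual feature structs handled above, and
    // the spec forbids chaining both forms at once. For example (illustrative only), an application that chains
    //     VkPhysicalDeviceVulkan11Features vulkan11 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES};
    //     vulkan11.multiview = VK_TRUE;
    // into VkDeviceCreateInfo::pNext takes the first branch and is copied wholesale, while chaining
    // VkPhysicalDeviceMultiviewFeatures instead is folded field-by-field into enabled_features.core11 above.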

    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_device_address_ext) {
        state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address_ext->bufferDeviceAddress;
        state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
            buffer_device_address_ext->bufferDeviceAddressCaptureReplay;
        state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
            buffer_device_address_ext->bufferDeviceAddressMultiDevice;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    const auto *dedicated_allocation_image_aliasing_features =
        lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
    if (dedicated_allocation_image_aliasing_features) {
        state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
            *dedicated_allocation_image_aliasing_features;
    }

    const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
    if (performance_query_features) {
        state_tracker->enabled_features.performance_query_features = *performance_query_features;
    }

    const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
    if (device_coherent_memory_features) {
        state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
    }

    const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
    if (ycbcr_image_array_features) {
        state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
    }

    // Store physical device properties and physical device memory limits into the state tracker's structs
    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core11);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core12);
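    // Both promoted property structs are gated on the same vk_feature_version_1_2 flag because
    // VkPhysicalDeviceVulkan11Properties and VkPhysicalDeviceVulkan12Properties only exist on Vulkan 1.2 devices;
    // pre-1.2 devices are covered by the per-extension property queries below.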

    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
        VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
        state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
            descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
        state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
            descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
        state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
        state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
            descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
    }
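    // The block above (like the depth/stencil resolve, timeline semaphore, and float controls blocks below) backfills
    // the promoted core12 property struct from the older per-extension struct on pre-1.2 devices, so downstream
    // validation can always read phys_dev_props_core12 regardless of how the capability was exposed.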

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
        VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
        state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
        state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
            depth_stencil_resolve_props.supportedStencilResolveModes;
        state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
        state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
        VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
        state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
            timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
    }

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
        VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
        state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
        state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
    }

    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(
            gpu, &numCooperativeMatrixProperties, state_tracker->cooperative_matrix_properties.data());
    }
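    // The query above uses Vulkan's standard two-call enumeration idiom: call once with a null array to get the count,
    // size the storage, then call again to fill it. A minimal standalone sketch of the same idiom (illustrative only,
    // assuming a valid VkPhysicalDevice gpu and resolved function pointers):
    //
    //     uint32_t count = 0;
    //     GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &count, nullptr);      // first call: count only
    //     std::vector<VkCooperativeMatrixPropertiesNV> props(count, lvl_init_struct<VkCooperativeMatrixPropertiesNV>());
    //     GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &count, props.data()); // second call: fill the array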
    if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
        state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
        state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
        state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
    }

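    // The map below records how many queues were requested from each family. For example (illustrative only), two
    // VkDeviceQueueCreateInfo entries {queueFamilyIndex = 0, queueCount = 2} and {queueFamilyIndex = 1, queueCount = 1}
    // produce queue_family_index_map entries {0 -> 2, 1 -> 1}, which later queue-related validation can consult.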
    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            state_tracker->queue_family_index_map.insert(
                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}

// Loop through bound objects and increment their in_use counts.
void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
    for (auto obj : cb_node->object_bindings) {
        auto base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_add(1);
        }
    }
}

// Track which resources are in-flight by atomically incrementing their "in_use" count
void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
    cb_node->submitCount++;
    cb_node->in_use.fetch_add(1);

    // First increment for all "generic" objects bound to the cmd buffer, followed by the special-case objects below
    IncrementBoundObjects(cb_node);
    // TODO: We should be able to remove the NULL look-up checks from the code below as long as
    // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
    // is then flagged prior to calling this function
    for (auto event : cb_node->writeEventsBeforeWait) {
        auto event_state = GetEventState(event);
        if (event_state) event_state->write_in_use++;
    }
}

// Decrement in-use count for objects bound to command buffer
void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
    BASE_NODE *base_obj = nullptr;
    for (auto obj : cb_node->object_bindings) {
        base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_sub(1);
        }
    }
}

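// Roll the given queue's submission history forward to sequence number "seq": for each completed submission, release
// semaphore and command-buffer in-use counts, promote ended queries to available, and retire any attached fence. Other
// queues are then recursively retired up to the highest sequence point this queue waited on.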
void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        for (auto &semaphore : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            QueryMap localQueryToStateMap;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
            }

            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;

                    // Guard against a destroyed query pool before inspecting its create info
                    const QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryStatePair.first.pool);
                    if (qp_state && qp_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
                        queryPassToStateMap[QueryObjectPass(queryStatePair.first, submission.perf_submit_pass)] =
                            QUERYSTATE_AVAILABLE;
                    }
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
}

// Submit a fence to a queue, delimiting previous fences and previous untracked
// work by it.
static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
    pFence->state = FENCE_INFLIGHT;
    pFence->signaler.first = pQueue->queue;
    pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
}

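// Record the state effects of vkQueueSubmit: build one submission entry per VkSubmitInfo (command buffers, semaphore
// waits/signals, and the fence on the last entry), bump in-use counts, and replay each command buffer's deferred
// query/event updates into the device-level maps. Work signaled to external scopes is retired immediately via
// early_retire_seq, since no corresponding wait will ever be observed for it.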
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        vector<SEMAPHORE_WAIT> semaphore_waits;
        vector<VkSemaphore> semaphore_signals;
        vector<VkSemaphore> semaphore_externals;
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                        pSemaphore->signaled = true;
                    } else if (timeline_semaphore_submit && timeline_semaphore_submit->pSignalSemaphoreValues) {
                        // Timeline semaphores carry their payload in the VkTimelineSemaphoreSubmitInfoKHR chain;
                        // guard against an invalid submit that omits it.
                        pSemaphore->payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(semaphore);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }
        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                EventToStageMap localEventToStageMap;
                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);

        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE,
                                         perf_submit ? perf_submit->counterPassIndex : 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS == result) {
        AddMemObjInfo(device, *pMemory, pAllocateInfo);
    }
}

void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            bindable_state->binding.mem = MEMORY_UNBOUND;
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    memObjMap.erase(mem);
}

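// vkQueueBindSparse bookkeeping mirrors the vkQueueSubmit path: sparse buffer/image binds are recorded as memory
// bindings, and the semaphore/fence handling below follows the same in-use and early-retire rules as ordinary submits.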
void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<VkSemaphore> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(semaphore);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

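// New semaphores default to binary type with payload 0; a VkSemaphoreTypeCreateInfoKHR in the pNext chain overrides
// both. For example (illustrative only):
//     auto type_info = lvl_init_struct<VkSemaphoreTypeCreateInfoKHR>();
//     type_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
//     type_info.initialValue = 5;  // becomes semaphore_state->payload below
//     auto create_info = lvl_init_struct<VkSemaphoreCreateInfo>(&type_info);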
void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
    semaphore_state->signaler.first = VK_NULL_HANDLE;
    semaphore_state->signaler.second = 0;
    semaphore_state->signaled = false;
    semaphore_state->scope = kSyncScopeInternal;
    semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
    semaphore_state->payload = 0;
    auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
    if (semaphore_type_create_info) {
        semaphore_state->type = semaphore_type_create_info->semaphoreType;
        semaphore_state->payload = semaphore_type_create_info->initialValue;
    }
    semaphoreMap[*pSemaphore] = std::move(semaphore_state);
}

void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
                                                        VkSemaphoreImportFlagsKHR flags) {
    SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
    if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
            sema_node->scope == kSyncScopeInternal) {
            sema_node->scope = kSyncScopeExternalTemporary;
        } else {
            sema_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
                                                              VkResult result) {
    if (VK_SUCCESS != result) return;
    auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
    if (pSemaphore) {
        pSemaphore->payload = pSignalInfo->value;
    }
}

void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range.offset = offset;
        mem_info->mapped_range.size = size;
        mem_info->p_driver_data = *ppData;
    }
}

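// Retire a signaled fence. When the fence was signaled by a queue, its recorded (queue, seq) signaler proves all prior
// work on that queue completed, so that work is retired as well; WSI-signaled fences are simply marked retired.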
void ValidationStateTracker::RetireFence(VkFence fence) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        if (pFence->signaler.first != VK_NULL_HANDLE) {
            // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
            RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
        } else {
            // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
            // the fence as retired.
            pFence->state = FENCE_RETIRED;
        }
    }
}

void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                         VkBool32 waitAll, uint64_t timeout, VkResult result) {
    if (VK_SUCCESS != result) return;

    // When we know that all fences are complete we can clean/remove their CBs
    if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
        for (uint32_t i = 0; i < fenceCount; i++) {
            RetireFence(pFences[i]);
        }
    }
    // NOTE: The alternate case, where only some of the fences have completed, is not handled here. For the app to know
    // which fences completed, it will have to call vkGetFenceStatus(), at which point we'll clean/remove their CBs if
    // complete.
}

void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
    if (VK_SUCCESS != result) return;
    RetireFence(fence);
}

void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
    // Add queue to tracking set only if it is new
    auto queue_is_new = queues.emplace(queue);
    if (queue_is_new.second) {
        QUEUE_STATE *queue_state = &queueMap[queue];
        queue_state->queue = queue;
        queue_state->queueFamilyIndex = queue_family_index;
        queue_state->seq = 0;
    }
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
    if (VK_SUCCESS != result) return;
    QUEUE_STATE *queue_state = GetQueueState(queue);
    RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
}

void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (auto &queue : queueMap) {
        RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
    }
}

void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    if (!fence) return;
    auto fence_state = GetFenceState(fence);
    fence_state->destroyed = true;
    fenceMap.erase(fence);
}

void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!semaphore) return;
    auto semaphore_state = GetSemaphoreState(semaphore);
    semaphore_state->destroyed = true;
    semaphoreMap.erase(semaphore);
}

void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    if (!event) return;
    EVENT_STATE *event_state = GetEventState(event);
    const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
    InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
    eventMap.erase(event);
}

void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!queryPool) return;
    QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
    const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
    InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
    qp_state->destroyed = true;
    queryPoolMap.erase(queryPool);
}

// Object with the given handle is being bound to memory with the given mem_info struct: insert the handle into the
// bound-object set that matches its type. Earlier versions of this function also tracked the bound range at
// memoryOffset and flagged incorrectly overlapping linear/non-linear ranges; only the handle sets are maintained now,
// so the memoryOffset/memRequirements/is_linear parameters are currently unused.
void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
                                               VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_linear) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.insert(typed_handle.handle);
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.insert(typed_handle.handle);
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
                                                    VkMemoryRequirements mem_reqs, bool is_linear) {
    InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear);
}

void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
                                                     const VkMemoryRequirements &mem_reqs) {
    InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true);
}

void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
                                                                    VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs) {
    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset, mem_reqs, true);
}

// Remove the given handle from the bound-object set of this memory allocation that matches the handle's type.
static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.erase(typed_handle.handle);
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.erase(typed_handle.handle);
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
}

void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
}

void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
}

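// Binding buffer memory is tracked in two steps below: the memory object's bound-buffer set gains the buffer handle
// (via InsertBufferMemoryRange), and the buffer's own binding (memory handle plus offset) is recorded via
// SetMemBinding. A hypothetical call sequence (illustrative only):
//     vkBindBufferMemory(device, buffer, mem, 256);
//     // -> PostCallRecordBindBufferMemory -> UpdateBindBufferMemoryState(buffer, mem, 256)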
void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        // Track bound memory range information
        auto mem_info = GetDevMemState(mem);
        if (mem_info) {
            InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
        }
        // Track objects tied to memory
        SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
                                                             const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                                const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        buffer_state->requirements = *pMemoryRequirements;
        buffer_state->memory_requirements_checked = true;
    }
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                        VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                           VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
}

void ValidationStateTracker::RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements *pMemoryRequirements) {
    IMAGE_STATE *image_state = GetImageState(image);
    if (image_state) {
        image_state->requirements = *pMemoryRequirements;
        image_state->memory_requirements_checked = true;
    }
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(image, pMemoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
}

static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
                                                        VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
    image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
    if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
        image_state->sparse_metadata_required = true;
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
    VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    auto image_state = GetImageState(image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!shaderModule) return;
    auto shader_module_state = GetShaderModuleState(shaderModule);
    shader_module_state->destroyed = true;
    shaderModuleMap.erase(shaderModule);
}

void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
                                                          const VkAllocationCallbacks *pAllocator) {
    if (!pipeline) return;
    PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
    const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
    pipeline_state->destroyed = true;
    pipelineMap.erase(pipeline);
}

void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!pipelineLayout) return;
    auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
    pipeline_layout_state->destroyed = true;
    pipelineLayoutMap.erase(pipelineLayout);
}

2175void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2176 const VkAllocationCallbacks *pAllocator) {
2177 if (!sampler) return;
2178 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
2179 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
2180 // Any bound cmd buffers are now invalid
2181 if (sampler_state) {
2182 InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
2183 sampler_state->destroyed = true;
2184 }
2185 samplerMap.erase(sampler);
2186}
2187
2188void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2189 const VkAllocationCallbacks *pAllocator) {
2190 if (!descriptorSetLayout) return;
2191 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2192 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002193 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002194 descriptorSetLayoutMap.erase(layout_it);
2195 }
2196}
2197
2198void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2199 const VkAllocationCallbacks *pAllocator) {
2200 if (!descriptorPool) return;
2201 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
2202 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
2203 if (desc_pool_state) {
2204 // Any bound cmd buffers are now invalid
2205 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
2206 // Free sets that were in this pool
2207 for (auto ds : desc_pool_state->sets) {
2208 FreeDescriptorSet(ds);
2209 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002210 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002211 descriptorPoolMap.erase(descriptorPool);
2212 }
2213}
2214
2215// Free all command buffers in the given list, removing all references/links to them via ResetCommandBufferState
2216void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
2217 const VkCommandBuffer *command_buffers) {
2218 for (uint32_t i = 0; i < command_buffer_count; i++) {
2219 auto cb_state = GetCBState(command_buffers[i]);
2220 // Remove references to command buffer's state and delete
2221 if (cb_state) {
2222 // reset prior to delete, removing various references to it.
2223 // TODO: fix this, it's insane.
2224 ResetCommandBufferState(cb_state->commandBuffer);
2225 // Remove the cb_state's references from COMMAND_POOL_STATEs
2226 pool_state->commandBuffers.erase(command_buffers[i]);
2227 // Remove the cb debug labels
2228 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
2229 // Remove CBState from CB map
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002230 cb_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002231 commandBufferMap.erase(cb_state->commandBuffer);
2232 }
2233 }
2234}
2235
2236void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2237 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
2238 auto pPool = GetCommandPoolState(commandPool);
2239 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
2240}
2241
2242void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2243 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2244 VkResult result) {
2245 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002246 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002247 cmd_pool_state->createFlags = pCreateInfo->flags;
2248 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
2249 commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
2250}
2251
2252void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2253 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2254 VkResult result) {
2255 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002256 auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002257 query_pool_state->createInfo = *pCreateInfo;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002258 query_pool_state->pool = *pQueryPool;
2259 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
2260 const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
2261 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
2262
2263 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2264 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2265 switch (counter.scope) {
2266 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
2267 query_pool_state->has_perf_scope_command_buffer = true;
2268 break;
2269 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
2270 query_pool_state->has_perf_scope_render_pass = true;
2271 break;
2272 default:
2273 break;
2274 }
2275 }
2276
2277 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
2278 &query_pool_state->n_performance_passes);
2279 }
2280
locke-lunargd556cc32019-09-17 01:21:23 -06002281 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
2282
2283 QueryObject query_obj{*pQueryPool, 0u};
2284 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2285 query_obj.query = i;
2286 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2287 }
2288}
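
// Editor's sketch (illustrative only): the create-info chaining that the performance-query branch above
// parses with lvl_find_in_chain<>. Counter indices would come from
// vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR; the values here are placeholder
// assumptions.
static VkResult CreatePerfQueryPoolExample(VkDevice device, uint32_t queue_family_index, VkQueryPool *out_pool) {
    const uint32_t counter_indices[] = {0, 1};  // Assumed: two previously enumerated counters
    VkQueryPoolPerformanceCreateInfoKHR perf_info = {VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR};
    perf_info.queueFamilyIndex = queue_family_index;
    perf_info.counterIndexCount = 2;
    perf_info.pCounterIndices = counter_indices;
    VkQueryPoolCreateInfo create_info = {VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO};
    create_info.pNext = &perf_info;  // The chain the record hook above locates
    create_info.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
    create_info.queryCount = 1;
    return vkCreateQueryPool(device, &create_info, nullptr, out_pool);
}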
2289
2290void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2291 const VkAllocationCallbacks *pAllocator) {
2292 if (!commandPool) return;
2293 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2294 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2295 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2296 if (cp_state) {
2297 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2298 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2299 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002300 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002301 commandPoolMap.erase(commandPool);
2302 }
2303}
2304
2305void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2306 VkCommandPoolResetFlags flags, VkResult result) {
2307 if (VK_SUCCESS != result) return;
2308 // Reset all of the CBs allocated from this pool
2309 auto command_pool_state = GetCommandPoolState(commandPool);
2310 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2311 ResetCommandBufferState(cmdBuffer);
2312 }
2313}
2314
2315void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2316 VkResult result) {
2317 for (uint32_t i = 0; i < fenceCount; ++i) {
2318 auto pFence = GetFenceState(pFences[i]);
2319 if (pFence) {
2320 if (pFence->scope == kSyncScopeInternal) {
2321 pFence->state = FENCE_UNSIGNALED;
2322 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2323 pFence->scope = kSyncScopeInternal;
2324 }
2325 }
2326 }
2327}
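
// Editor's sketch: why a reset can change fence scope. A payload imported with
// VK_FENCE_IMPORT_TEMPORARY_BIT is dropped when the fence is reset, which is what the
// kSyncScopeExternalTemporary -> kSyncScopeInternal transition above models. Handle names are assumptions.
static void ResetImportedFenceExample(VkDevice device, VkFence temporarily_imported_fence) {
    vkResetFences(device, 1, &temporarily_imported_fence);  // Temporary import dropped; payload is internal again
}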
2328
2329// For the given cb_nodes, invalidate them and track the object causing the invalidation.
2330// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
2331// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
2332// can also unlink objects from command buffers.
2333void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
2334 const VulkanTypedHandle &obj, bool unlink) {
2335 for (const auto &cb_node_pair : cb_nodes) {
2336 auto &cb_node = cb_node_pair.first;
2337 if (cb_node->state == CB_RECORDING) {
2338 cb_node->state = CB_INVALID_INCOMPLETE;
2339 } else if (cb_node->state == CB_RECORDED) {
2340 cb_node->state = CB_INVALID_COMPLETE;
2341 }
2342 cb_node->broken_bindings.push_back(obj);
2343
2344 // if secondary, then propagate the invalidation to the primaries that will call us.
2345 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
2346 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
2347 }
2348 if (unlink) {
2349 int index = cb_node_pair.second;
2350 assert(cb_node->object_bindings[index] == obj);
2351 cb_node->object_bindings[index] = VulkanTypedHandle();
2352 }
2353 }
2354 if (unlink) {
2355 cb_nodes.clear();
2356 }
2357}
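
// Editor's sketch (illustrative, not part of the layer): the application pattern that drives this
// invalidation. Destroying an object referenced by a recorded command buffer moves that buffer to
// CB_INVALID_COMPLETE, and the object lands in broken_bindings as the cause. Handle names are assumptions.
static void InvalidateByDestroyExample(VkDevice device, VkCommandBuffer recorded_cb, VkSampler sampler) {
    // recorded_cb previously recorded a draw through a descriptor set that references 'sampler'
    vkDestroySampler(device, sampler, nullptr);
    // Submitting recorded_cb now without re-recording it is invalid; the state tracker can report the
    // destroyed sampler via the command buffer's broken_bindings list.
    (void)recorded_cb;
}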
2358
2359void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
2360 const VulkanTypedHandle &obj) {
locke-lunargd556cc32019-09-17 01:21:23 -06002361 for (auto cb_node : cb_nodes) {
2362 if (cb_node->state == CB_RECORDING) {
2363 cb_node->state = CB_INVALID_INCOMPLETE;
2364 } else if (cb_node->state == CB_RECORDED) {
2365 cb_node->state = CB_INVALID_COMPLETE;
2366 }
2367 cb_node->broken_bindings.push_back(obj);
2368
2369 // if secondary, then propagate the invalidation to the primaries that will call us.
2370 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05002371 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06002372 }
2373 }
2374}
2375
2376void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2377 const VkAllocationCallbacks *pAllocator) {
2378 if (!framebuffer) return;
2379 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2380 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2381 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002382 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002383 frameBufferMap.erase(framebuffer);
2384}
2385
2386void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2387 const VkAllocationCallbacks *pAllocator) {
2388 if (!renderPass) return;
2389 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2390 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2391 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002392 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002393 renderPassMap.erase(renderPass);
2394}
2395
2396void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2397 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2398 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002399 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002400 fence_state->fence = *pFence;
2401 fence_state->createInfo = *pCreateInfo;
2402 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2403 fenceMap[*pFence] = std::move(fence_state);
2404}
2405
2406bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2407 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2408 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002409 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002410 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2411 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2412 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2413 cgpl_state->pipe_state.reserve(count);
2414 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002415 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002416 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002417 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002418 }
2419 return false;
2420}
2421
2422void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2423 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2424 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2425 VkResult result, void *cgpl_state_data) {
2426 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2427 // This API may create pipelines regardless of the return value
2428 for (uint32_t i = 0; i < count; i++) {
2429 if (pPipelines[i] != VK_NULL_HANDLE) {
2430 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2431 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2432 }
2433 }
2434 cgpl_state->pipe_state.clear();
2435}
2436
2437bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2438 const VkComputePipelineCreateInfo *pCreateInfos,
2439 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002440 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002441 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2442 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2443 ccpl_state->pipe_state.reserve(count);
2444 for (uint32_t i = 0; i < count; i++) {
2445 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002446 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002447 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002448 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002449 }
2450 return false;
2451}
2452
2453void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2454 const VkComputePipelineCreateInfo *pCreateInfos,
2455 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2456 VkResult result, void *ccpl_state_data) {
2457 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2458
2459 // This API may create pipelines regardless of the return value
2460 for (uint32_t i = 0; i < count; i++) {
2461 if (pPipelines[i] != VK_NULL_HANDLE) {
2462 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2463 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2464 }
2465 }
2466 ccpl_state->pipe_state.clear();
2467}
2468
2469bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2470 uint32_t count,
2471 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2472 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002473 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002474 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2475 crtpl_state->pipe_state.reserve(count);
2476 for (uint32_t i = 0; i < count; i++) {
2477 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002478 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002479 crtpl_state->pipe_state.back()->initRayTracingPipelineNV(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002480 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002481 }
2482 return false;
2483}
2484
2485void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2486 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2487 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2488 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2489 // This API may create pipelines regardless of the return value
2490 for (uint32_t i = 0; i < count; i++) {
2491 if (pPipelines[i] != VK_NULL_HANDLE) {
2492 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2493 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2494 }
2495 }
2496 crtpl_state->pipe_state.clear();
2497}
2498
2499void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2500 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2501 VkResult result) {
2502 if (VK_SUCCESS != result) return;  // *pSampler is undefined on failure; match the other create hooks
2503 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002503}
2504
2505void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2506 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2507 const VkAllocationCallbacks *pAllocator,
2508 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2509 if (VK_SUCCESS != result) return;
2510 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2511}
2512
2513// Comparator giving a repeatable sort order; not useful for "memory in range" searches
2514struct PushConstantRangeCompare {
2515 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2516 if (lhs->offset == rhs->offset) {
2517 if (lhs->size == rhs->size) {
2518 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2519 return lhs->stageFlags < rhs->stageFlags;
2520 }
2521 // If the offsets are the same then sorting by the end of range is useful for validation
2522 return lhs->size < rhs->size;
2523 }
2524 return lhs->offset < rhs->offset;
2525 }
2526};
2527
2528static PushConstantRangesDict push_constant_ranges_dict;
2529
2530PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2531 if (!info->pPushConstantRanges) {
2532 // Hand back the empty entry (creating as needed)...
2533 return push_constant_ranges_dict.look_up(PushConstantRanges());
2534 }
2535
2536 // Sort the input ranges to ensure equivalent ranges map to the same id
2537 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2538 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2539 sorted.insert(info->pPushConstantRanges + i);
2540 }
2541
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07002542 PushConstantRanges ranges;
2543 ranges.reserve(sorted.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002544 for (const auto range : sorted) {
2545 ranges.emplace_back(*range);
2546 }
2547 return push_constant_ranges_dict.look_up(std::move(ranges));
2548}
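
// Editor's sketch: why the sort above matters. Two pipeline layouts declaring the same push constant
// ranges in a different order must be "compatible for push constants", so both orderings canonicalize to
// the same id (the dictionary hands back one shared id per distinct content).
static void PushConstantCanonicalizationExample() {
    const VkPushConstantRange a[] = {{VK_SHADER_STAGE_VERTEX_BIT, 0, 16}, {VK_SHADER_STAGE_FRAGMENT_BIT, 16, 32}};
    const VkPushConstantRange b[] = {{VK_SHADER_STAGE_FRAGMENT_BIT, 16, 32}, {VK_SHADER_STAGE_VERTEX_BIT, 0, 16}};
    VkPipelineLayoutCreateInfo ci_a = {VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO};
    ci_a.pushConstantRangeCount = 2;
    ci_a.pPushConstantRanges = a;
    VkPipelineLayoutCreateInfo ci_b = ci_a;
    ci_b.pPushConstantRanges = b;
    assert(GetCanonicalId(&ci_a) == GetCanonicalId(&ci_b));  // Same canonical id for either ordering
}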
2549
2550// Dictionary of the canonical form of a pipeline layout's list of descriptor set layouts
2551static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2552
2553// Dictionary of canonical form of the "compatible for set" records
2554static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2555
2556static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2557 const PipelineLayoutSetLayoutsId set_layouts_id) {
2558 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2559}
2560
2561void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2562 const VkAllocationCallbacks *pAllocator,
2563 VkPipelineLayout *pPipelineLayout, VkResult result) {
2564 if (VK_SUCCESS != result) return;
2565
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002566 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002567 pipeline_layout_state->layout = *pPipelineLayout;
2568 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2569 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2570 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002571 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002572 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2573 }
2574
2575 // Get canonical form IDs for the "compatible for set" contents
2576 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2577 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2578 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2579
2580 // Create table of "compatible for set N" canonical forms for trivial accept validation
2581 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2582 pipeline_layout_state->compat_for_set.emplace_back(
2583 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2584 }
2585 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2586}
2587
2588void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2589 const VkAllocationCallbacks *pAllocator,
2590 VkDescriptorPool *pDescriptorPool, VkResult result) {
2591 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002592 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002593}
2594
2595void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2596 VkDescriptorPoolResetFlags flags, VkResult result) {
2597 if (VK_SUCCESS != result) return;
2598 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
2599 // TODO: validate flags
2600 // For every set allocated from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
2601 for (auto ds : pPool->sets) {
2602 FreeDescriptorSet(ds);
2603 }
2604 pPool->sets.clear();
2605 // Reset available count for each type and available sets for this pool
2606 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
2607 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
2608 }
2609 pPool->availableSets = pPool->maxSets;
2610}
2611
2612bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2613 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002614 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002615 // Always update common data
2616 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2617 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2618 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2619
2620 return false;
2621}
2622
2623// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
2624void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2625 VkDescriptorSet *pDescriptorSets, VkResult result,
2626 void *ads_state_data) {
2627 if (VK_SUCCESS != result) return;
2628 // All the updates are contained in a single cvdescriptorset function
2629 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2630 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2631 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2632}
2633
2634void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2635 const VkDescriptorSet *pDescriptorSets) {
2636 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2637 // Update available descriptor sets in pool
2638 pool_state->availableSets += count;
2639
2640 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
2641 for (uint32_t i = 0; i < count; ++i) {
2642 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2643 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2644 uint32_t type_index = 0, descriptor_count = 0;
2645 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2646 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2647 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2648 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2649 }
2650 FreeDescriptorSet(descriptor_set);
2651 pool_state->sets.erase(descriptor_set);
2652 }
2653 }
2654}
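
// Editor's sketch: the pool bookkeeping above in application terms. Freeing a set returns both the set
// slot and its per-type descriptor counts to the pool, so a later allocation of the same shape can
// succeed. Handles are assumptions; the pool is assumed to have been created with
// VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT.
static void FreeAndReallocateExample(VkDevice device, VkDescriptorPool pool, VkDescriptorSet set,
                                     const VkDescriptorSetAllocateInfo *same_shape_info) {
    vkFreeDescriptorSets(device, pool, 1, &set);  // availableSets += 1; per-type counts returned to the pool
    VkDescriptorSet recycled = VK_NULL_HANDLE;
    vkAllocateDescriptorSets(device, same_shape_info, &recycled);  // Can reuse the returned capacity
}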
2655
2656void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2657 const VkWriteDescriptorSet *pDescriptorWrites,
2658 uint32_t descriptorCopyCount,
2659 const VkCopyDescriptorSet *pDescriptorCopies) {
2660 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2661 pDescriptorCopies);
2662}
2663
2664void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2665 VkCommandBuffer *pCommandBuffer, VkResult result) {
2666 if (VK_SUCCESS != result) return;
Jeff Bolz6835fda2019-10-06 00:15:34 -05002667 auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002668 if (pPool) {
2669 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2670 // Add command buffer to its commandPool map
2671 pPool->commandBuffers.insert(pCommandBuffer[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002672 auto pCB = std::make_shared<CMD_BUFFER_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002673 pCB->createInfo = *pCreateInfo;
2674 pCB->device = device;
Jeff Bolz6835fda2019-10-06 00:15:34 -05002675 pCB->command_pool = pPool;
locke-lunargd556cc32019-09-17 01:21:23 -06002676 // Add command buffer to map
2677 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
2678 ResetCommandBufferState(pCommandBuffer[i]);
2679 }
2680 }
2681}
2682
2683// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
2684void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05002685 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06002686 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07002687 // If imageless fb, skip fb binding
2688 if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
locke-lunargd556cc32019-09-17 01:21:23 -06002689 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
2690 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
2691 auto view_state = GetAttachmentImageViewState(fb_state, attachment);
2692 if (view_state) {
2693 AddCommandBufferBindingImageView(cb_state, view_state);
2694 }
2695 }
2696}
2697
2698void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
2699 const VkCommandBufferBeginInfo *pBeginInfo) {
2700 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2701 if (!cb_state) return;
locke-lunargd556cc32019-09-17 01:21:23 -06002702 if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
2703 // Secondary Command Buffer
2704 const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
2705 if (pInfo) {
2706 if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
2707 assert(pInfo->renderPass);
2708 auto framebuffer = GetFramebufferState(pInfo->framebuffer);
2709 if (framebuffer) {
2710 // Connect this framebuffer and its children to this cmdBuffer
2711 AddFramebufferBinding(cb_state, framebuffer);
2712 }
2713 }
2714 }
2715 }
2716 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
2717 ResetCommandBufferState(commandBuffer);
2718 }
2719 // Set updated state here in case implicit reset occurs above
2720 cb_state->state = CB_RECORDING;
2721 cb_state->beginInfo = *pBeginInfo;
2722 if (cb_state->beginInfo.pInheritanceInfo) {
2723 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
2724 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
2725 // If we are a secondary command buffer inheriting render pass state, update the items we should inherit.
2726 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
2727 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
2728 cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
2729 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
2730 cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
2731 cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
2732 }
2733 }
2734
2735 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
2736 if (chained_device_group_struct) {
2737 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
2738 } else {
2739 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
2740 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002741
2742 cb_state->performance_lock_acquired = performance_lock_acquired;
locke-lunargd556cc32019-09-17 01:21:23 -06002743}
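
// Editor's sketch: the pNext chain parsed above. Without a VkDeviceGroupCommandBufferBeginInfo the
// tracker assumes all devices in the group ((1 << physical_device_count) - 1); with one, only the named
// devices. The mask value below is an assumption for illustration.
static void BeginWithDeviceMaskExample(VkCommandBuffer cb) {
    VkDeviceGroupCommandBufferBeginInfo group_info = {VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO};
    group_info.deviceMask = 0x1;  // Assumed: restrict recording to device 0 of the group
    VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};
    begin_info.pNext = &group_info;  // This is what lvl_find_in_chain<> locates above
    vkBeginCommandBuffer(cb, &begin_info);
}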
2744
2745void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2746 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2747 if (!cb_state) return;
2748 // Cached validation is specific to a specific recording of a specific command buffer.
2749 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
2750 descriptor_set->ClearCachedValidation(cb_state);
2751 }
2752 cb_state->validated_descriptor_sets.clear();
2753 if (VK_SUCCESS == result) {
2754 cb_state->state = CB_RECORDED;
2755 }
2756}
2757
2758void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
2759 VkResult result) {
2760 if (VK_SUCCESS == result) {
2761 ResetCommandBufferState(commandBuffer);
2762 }
2763}
2764
2765CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
2766 // initially assume everything is static state
2767 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
2768
2769 if (ds) {
2770 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
2771 switch (ds->pDynamicStates[i]) {
2772 case VK_DYNAMIC_STATE_LINE_WIDTH:
2773 flags &= ~CBSTATUS_LINE_WIDTH_SET;
2774 break;
2775 case VK_DYNAMIC_STATE_DEPTH_BIAS:
2776 flags &= ~CBSTATUS_DEPTH_BIAS_SET;
2777 break;
2778 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
2779 flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
2780 break;
2781 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
2782 flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
2783 break;
2784 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
2785 flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
2786 break;
2787 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
2788 flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
2789 break;
2790 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
2791 flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
2792 break;
2793 case VK_DYNAMIC_STATE_SCISSOR:
2794 flags &= ~CBSTATUS_SCISSOR_SET;
2795 break;
2796 case VK_DYNAMIC_STATE_VIEWPORT:
2797 flags &= ~CBSTATUS_VIEWPORT_SET;
2798 break;
2799 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
2800 flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2801 break;
2802 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
2803 flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
2804 break;
2805 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
2806 flags &= ~CBSTATUS_LINE_STIPPLE_SET;
2807 break;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02002808 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
2809 flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
2810 break;
locke-lunargd556cc32019-09-17 01:21:23 -06002811 default:
2812 break;
2813 }
2814 }
2815 }
2816
2817 return flags;
2818}
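
// Editor's sketch: how the mask above is consumed. A pipeline that declares a state dynamic leaves the
// corresponding CBSTATUS_*_SET bit cleared in its static mask, so the bit can only be satisfied by the
// matching vkCmdSet* call on the command buffer.
static CBStatusFlags StaticMaskExample() {
    const VkDynamicState dynamic_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
    VkPipelineDynamicStateCreateInfo ds = {VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO};
    ds.dynamicStateCount = 2;
    ds.pDynamicStates = dynamic_states;
    CBStatusFlags static_mask = MakeStaticStateMask(&ds);
    assert((static_mask & CBSTATUS_VIEWPORT_SET) == 0);  // Viewport must be set dynamically
    assert((static_mask & CBSTATUS_SCISSOR_SET) == 0);   // Scissor must be set dynamically
    return static_mask;
}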
2819
2820// Validation cache:
2821// CV is the bottommost implementor of this extension. Don't pass calls down.
2822// Utility function to set collective state for a pipeline
2823void SetPipelineState(PIPELINE_STATE *pPipe) {
2824 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
2825 if (pPipe->graphicsPipelineCI.pColorBlendState) {
2826 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
2827 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
2828 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2829 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2830 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2831 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2832 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2833 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2834 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2835 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
2836 pPipe->blendConstantsEnabled = true;
2837 }
2838 }
2839 }
2840 }
2841}
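
// Editor's sketch: a color-blend attachment that trips blendConstantsEnabled above, because a
// constant-color/alpha blend factor makes the blend constants (static or via vkCmdSetBlendConstants)
// relevant to the draw. Values are illustrative assumptions.
static VkPipelineColorBlendAttachmentState BlendConstantsAttachmentExample() {
    VkPipelineColorBlendAttachmentState att = {};
    att.blendEnable = VK_TRUE;
    att.srcColorBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;  // Falls in the [CONSTANT_COLOR, ONE_MINUS_CONSTANT_ALPHA] range tested above
    att.dstColorBlendFactor = VK_BLEND_FACTOR_ONE;
    att.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |
                         VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
    return att;
}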
2842
2843void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
2844 VkPipeline pipeline) {
2845 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2846 assert(cb_state);
2847
2848 auto pipe_state = GetPipelineState(pipeline);
2849 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
2850 cb_state->status &= ~cb_state->static_status;
2851 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
2852 cb_state->status |= cb_state->static_status;
2853 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002854 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002855 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
2856 SetPipelineState(pipe_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05002857 AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002858}
2859
2860void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2861 uint32_t viewportCount, const VkViewport *pViewports) {
2862 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2863 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
2864 cb_state->status |= CBSTATUS_VIEWPORT_SET;
2865}
2866
2867void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
2868 uint32_t exclusiveScissorCount,
2869 const VkRect2D *pExclusiveScissors) {
2870 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2871 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
2872 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
2873 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2874}
2875
2876void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
2877 VkImageLayout imageLayout) {
2878 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2879
2880 if (imageView != VK_NULL_HANDLE) {
2881 auto view_state = GetImageViewState(imageView);
2882 AddCommandBufferBindingImageView(cb_state, view_state);
2883 }
2884}
2885
2886void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2887 uint32_t viewportCount,
2888 const VkShadingRatePaletteNV *pShadingRatePalettes) {
2889 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2890 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
2891 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
2892 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
2893}
2894
2895void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
2896 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
2897 const VkAllocationCallbacks *pAllocator,
2898 VkAccelerationStructureNV *pAccelerationStructure,
2899 VkResult result) {
2900 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002901 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002902
2903 // Query the requirements up front in case the application doesn't, to avoid querying at bind/validation time
2904 VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
2905 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2906 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
2907 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
2908 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
2909
2910 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
2911 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2912 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
2913 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
2914 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
2915 &as_state->build_scratch_memory_requirements);
2916
2917 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
2918 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2919 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
2920 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
2921 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
2922 &as_state->update_scratch_memory_requirements);
2923
2924 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
2925}
2926
2927void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
2928 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
2929 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
2930 if (as_state != nullptr) {
2931 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
2932 as_state->memory_requirements = *pMemoryRequirements;
2933 as_state->memory_requirements_checked = true;
2934 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
2935 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
2936 as_state->build_scratch_memory_requirements_checked = true;
2937 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
2938 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
2939 as_state->update_scratch_memory_requirements_checked = true;
2940 }
2941 }
2942}
2943
2944void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
2945 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
2946 if (VK_SUCCESS != result) return;
2947 for (uint32_t i = 0; i < bindInfoCount; i++) {
2948 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
2949
2950 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
2951 if (as_state) {
2952 // Track bound memory range information
2953 auto mem_info = GetDevMemState(info.memory);
2954 if (mem_info) {
2955 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
2956 as_state->requirements);
2957 }
2958 // Track objects tied to memory
2959 SetMemBinding(info.memory, as_state, info.memoryOffset,
2960 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));
2961
2962 // GPU validation of top level acceleration structure building needs acceleration structure handles.
2963 if (enabled.gpu_validation) {
2964 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
2965 }
2966 }
2967 }
2968}
2969
2970void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
2971 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
2972 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
2973 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2974 if (cb_state == nullptr) {
2975 return;
2976 }
2977
2978 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
2979 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
2980 if (dst_as_state != nullptr) {
2981 dst_as_state->built = true;
2982 dst_as_state->build_info.initialize(pInfo);
2983 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
2984 }
2985 if (src_as_state != nullptr) {
2986 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
2987 }
2988 cb_state->hasBuildAccelerationStructureCmd = true;
2989}
2990
2991void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
2992 VkAccelerationStructureNV dst,
2993 VkAccelerationStructureNV src,
2994 VkCopyAccelerationStructureModeNV mode) {
2995 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2996 if (cb_state) {
2997 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
2998 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
2999 if (dst_as_state != nullptr && src_as_state != nullptr) {
3000 dst_as_state->built = true;
3001 dst_as_state->build_info = src_as_state->build_info;
3002 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3003 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3004 }
3005 }
3006}
3007
3008void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3009 VkAccelerationStructureNV accelerationStructure,
3010 const VkAllocationCallbacks *pAllocator) {
3011 if (!accelerationStructure) return;
3012 auto *as_state = GetAccelerationStructureState(accelerationStructure);
3013 if (as_state) {
3014 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
3015 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
3016 for (auto mem_binding : as_state->GetBoundMemory()) {
3017 auto mem_info = GetDevMemState(mem_binding);
3018 if (mem_info) {
3019 RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_info);
3020 }
3021 }
3022 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003023 as_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003024 accelerationStructureMap.erase(accelerationStructure);
3025 }
3026}
3027
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003028void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3029 uint32_t viewportCount,
3030 const VkViewportWScalingNV *pViewportWScalings) {
3031 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3032 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
3033}
3034
locke-lunargd556cc32019-09-17 01:21:23 -06003035void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3036 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3037 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
3038}
3039
3040void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3041 uint16_t lineStipplePattern) {
3042 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3043 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
3044}
3045
3046void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3047 float depthBiasClamp, float depthBiasSlopeFactor) {
3048 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3049 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
3050}
3051
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003052void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3053 const VkRect2D *pScissors) {
3054 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3055 cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
3056 cb_state->status |= CBSTATUS_SCISSOR_SET;
3057}
3058
locke-lunargd556cc32019-09-17 01:21:23 -06003059void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3060 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3061 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
3062}
3063
3064void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3065 float maxDepthBounds) {
3066 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3067 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
3068}
3069
3070void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3071 uint32_t compareMask) {
3072 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3073 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
3074}
3075
3076void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3077 uint32_t writeMask) {
3078 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3079 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
3080}
3081
3082void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3083 uint32_t reference) {
3084 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3085 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
3086}
3087
3088// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
3089// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
3090// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
3091void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
3092 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
3093 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
3094 cvdescriptorset::DescriptorSet *push_descriptor_set,
3095 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
3096 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
3097 // Defensive
3098 assert(pipeline_layout);
3099 if (!pipeline_layout) return;
3100
3101 uint32_t required_size = first_set + set_count;
3102 const uint32_t last_binding_index = required_size - 1;
3103 assert(last_binding_index < pipeline_layout->compat_for_set.size());
3104
3105 // Some useful shorthand
3106 auto &last_bound = cb_state->lastBound[pipeline_bind_point];
3107 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
3108 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
3109
3110 // We need this three times in this function, but nowhere else
3111 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
3112 if (ds && ds->IsPushDescriptor()) {
3113 assert(ds == last_bound.push_descriptor_set.get());
3114 last_bound.push_descriptor_set = nullptr;
3115 return true;
3116 }
3117 return false;
3118 };
3119
3120 // Clean up the "disturbed" before and after the range to be set
3121 if (required_size < current_size) {
3122 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
3123 // We're disturbing entries past the last one; we'll shrink below, but first check for and clean up the push_descriptor
3124 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
3125 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
3126 }
3127 } else {
3128 // We're not disturbing past last, so leave the upper binding data alone.
3129 required_size = current_size;
3130 }
3131 }
3132
3133 // We resize if we need more set entries or if those past "last" are disturbed
3134 if (required_size != current_size) {
3135 last_bound.per_set.resize(required_size);
3136 }
3137
3138 // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
3139 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
3140 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
3141 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3142 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
3143 last_bound.per_set[set_idx].dynamicOffsets.clear();
3144 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
3145 }
3146 }
3147
3148 // Now update the bound sets with the input sets
3149 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
3150 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
3151 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
3152 cvdescriptorset::DescriptorSet *descriptor_set =
3153 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
3154
3155 // Record binding (or push)
3156 if (descriptor_set != last_bound.push_descriptor_set.get()) {
3157 // Only cleanup the push descriptors if they aren't the currently used set.
3158 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3159 }
3160 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
3161 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
3162
3163 if (descriptor_set) {
3164 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
3165 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
3166 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
3167 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
3168 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
3169 input_dynamic_offsets = end_offset;
3170 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
3171 } else {
3172 last_bound.per_set[set_idx].dynamicOffsets.clear();
3173 }
3174 if (!descriptor_set->IsPushDescriptor()) {
3175 // Can't cache validation of push_descriptors
3176 cb_state->validated_descriptor_sets.insert(descriptor_set);
3177 }
3178 }
3179 }
3180}
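
// Editor's sketch of the "disturb" rule this function implements: rebinding set N with a layout whose
// sets [0..N-1] are compatible with the previously bound layout leaves the lower bindings (and their
// dynamic offsets) intact; an incompatible layout invalidates them. Handles are assumptions.
static void DescriptorDisturbExample(VkCommandBuffer cb, VkPipelineLayout compatible_layout, VkDescriptorSet set1) {
    // Sets 0 and 1 were bound earlier with a layout compatible for set 0.
    // Rebinding only set 1 leaves set 0's binding untouched per the rules above.
    vkCmdBindDescriptorSets(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, compatible_layout,
                            1 /*firstSet*/, 1 /*setCount*/, &set1, 0, nullptr);
}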
3181
3182// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3183void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3184 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3185 uint32_t firstSet, uint32_t setCount,
3186 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3187 const uint32_t *pDynamicOffsets) {
3188 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3189 auto pipeline_layout = GetPipelineLayout(layout);
3190
3191 // Resize binding arrays
3192 uint32_t last_set_index = firstSet + setCount - 1;
3193 if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
3194 cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
3195 }
3196
3197 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3198 dynamicOffsetCount, pDynamicOffsets);
3199 cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
3200 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3201}
3202
3203void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
3204 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
3205 const VkWriteDescriptorSet *pDescriptorWrites) {
3206 const auto &pipeline_layout = GetPipelineLayout(layout);
3207 // Short circuit invalid updates
3208 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
3209 !pipeline_layout->set_layouts[set]->IsPushDescriptor())
3210 return;
3211
3212 // We need a descriptor set to update the bindings with, compatible with the passed layout
3213 const auto dsl = pipeline_layout->set_layouts[set];
3214 auto &last_bound = cb_state->lastBound[pipelineBindPoint];
3215 auto &push_descriptor_set = last_bound.push_descriptor_set;
3216 // If we are disturbing the current push_descriptor_set, clear it
3217 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
John Zulaufd2c3dae2019-12-12 11:02:17 -07003218 last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
locke-lunargd556cc32019-09-17 01:21:23 -06003219 }
3220
3221 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
3222 nullptr);
3223 last_bound.pipeline_layout = layout;
3224
3225 // Now that we have either the new or extant push_descriptor set ... do the write updates against it
Jeff Bolz41a1ced2019-10-11 11:40:49 -05003226 push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
locke-lunargd556cc32019-09-17 01:21:23 -06003227}
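
// Editor's sketch: the application call recorded by the push-descriptor path above. Requires
// VK_KHR_push_descriptor and a set layout created with
// VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR; handles and the binding number are
// assumptions, and the extension entry point is assumed to be available.
static void PushDescriptorExample(VkCommandBuffer cb, VkPipelineLayout layout, VkBuffer ubo) {
    VkDescriptorBufferInfo buffer_info = {ubo, 0, VK_WHOLE_SIZE};
    VkWriteDescriptorSet write = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET};
    write.dstBinding = 0;  // dstSet is ignored for push descriptors
    write.descriptorCount = 1;
    write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    write.pBufferInfo = &buffer_info;
    vkCmdPushDescriptorSetKHR(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, layout, 0 /*set*/, 1, &write);
}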
3228
3229void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3230 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3231 uint32_t set, uint32_t descriptorWriteCount,
3232 const VkWriteDescriptorSet *pDescriptorWrites) {
3233 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3234 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3235}

void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                                                            VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                                                            const void *pValues) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state != nullptr) {
        ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);

        auto &push_constant_data = cb_state->push_constant_data;
        assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
        std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
    }
}
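
// Illustrative app-side sketch (not part of the layer; 'cmd' and 'layout' are
// hypothetical). The shadow copy above mirrors bytes supplied like this,
// assuming 'layout' declares a 16-byte push constant range for the vertex stage:
//
//     float color[4] = {1.0f, 0.0f, 0.0f, 1.0f};
//     vkCmdPushConstants(cmd, layout, VK_SHADER_STAGE_VERTEX_BIT, 0 /*offset*/, sizeof(color), color);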

void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                             VkIndexType indexType) {
    auto buffer_state = GetBufferState(buffer);
    auto cb_state = GetCBState(commandBuffer);

    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->index_buffer_binding.buffer = buffer;
    cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
    cb_state->index_buffer_binding.offset = offset;
    cb_state->index_buffer_binding.index_type = indexType;
    // Add binding for this index buffer to this commandbuffer
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                               uint32_t bindingCount, const VkBuffer *pBuffers,
                                                               const VkDeviceSize *pOffsets) {
    auto cb_state = GetCBState(commandBuffer);

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer = pBuffers[i];
        vertex_buffer_binding.offset = pOffsets[i];
        // Add binding for this vertex buffer to this commandbuffer
        AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
    }
}
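
// Illustrative app-side sketch (not part of the layer; all handles are
// hypothetical). The two hooks above record bindings made like this:
//
//     vkCmdBindIndexBuffer(cmd, index_buffer, 0 /*offset*/, VK_INDEX_TYPE_UINT16);
//     VkBuffer vertex_buffers[] = {position_buffer, normal_buffer};
//     VkDeviceSize offsets[] = {0, 0};
//     vkCmdBindVertexBuffers(cmd, 0 /*firstBinding*/, 2, vertex_buffers, offsets);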

void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
                                                           VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
}

bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
                                               EventToStageMap *localEventToStageMap) {
    (*localEventToStageMap)[event] = stageMask;
    return false;
}

void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}

void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }

    cb_state->eventUpdates.emplace_back(
        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
        });
}

void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                        uint32_t bufferMemoryBarrierCount,
                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                        uint32_t imageMemoryBarrierCount,
                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    for (uint32_t i = 0; i < eventCount; ++i) {
        auto event_state = GetEventState(pEvents[i]);
        if (event_state) {
            AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
                                    cb_state);
        }
        cb_state->waitedEvents.insert(pEvents[i]);
        cb_state->events.push_back(pEvents[i]);
    }
}
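
// Illustrative app-side sketch (not part of the layer; 'cmd' and 'event' are
// hypothetical). The event hooks above queue deferred state updates for calls
// such as this signal-then-wait pair within one command buffer:
//
//     vkCmdSetEvent(cmd, event, VK_PIPELINE_STAGE_TRANSFER_BIT);
//     vkCmdWaitEvents(cmd, 1, &event, VK_PIPELINE_STAGE_TRANSFER_BIT,
//                     VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
//                     0, nullptr, 0, nullptr, 0, nullptr);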

bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
    (*localQueryToStateMap)[object] = value;
    return false;
}

bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, QueryState value,
                                                QueryMap *localQueryToStateMap) {
    for (uint32_t i = 0; i < queryCount; i++) {
        QueryObject object = {queryPool, firstQuery + i};
        (*localQueryToStateMap)[object] = value;
    }
    return false;
}

QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool,
                                                 uint32_t queryIndex) const {
    QueryObject query = {queryPool, queryIndex};

    // Check the local (per-command-buffer) map first, then fall back to the device-wide map
    const std::array<const decltype(queryToStateMap) *, 2> map_list = {localQueryToStateMap, &queryToStateMap};

    for (const auto map : map_list) {
        auto query_data = map->find(query);
        if (query_data != map->end()) {
            return query_data->second;
        }
    }
    return QUERYSTATE_UNKNOWN;
}

void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.insert(query_obj);
    cb_state->startedQueries.insert(query_obj);
    cb_state->queryUpdates.emplace_back(
        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            SetQueryState(query_obj, QUERYSTATE_RUNNING, localQueryToStateMap);
            return false;
        });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
                                                         VkFlags flags) {
    if (disabled.query_validation) return;
    QueryObject query = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query);
}

void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.erase(query_obj);
    cb_state->queryUpdates.emplace_back(
        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            return SetQueryState(query_obj, QUERYSTATE_ENDED, localQueryToStateMap);
        });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    QueryObject query_obj = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, QUERYSTATE_RESET, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
                                                                   VkDeviceSize dstOffset, VkDeviceSize stride,
                                                                   VkQueryResultFlags flags) {
    if (disabled.query_validation) return;
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buff_state = GetBufferState(dstBuffer);
    AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    cb_state->queryUpdates.emplace_back(
        [query](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            return SetQueryState(query, QUERYSTATE_ENDED, localQueryToStateMap);
        });
}
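
// Illustrative app-side sketch (not part of the layer; 'cmd', 'query_pool', and
// 'results_buffer' are hypothetical). The query hooks above track a typical
// reset/begin/end sequence, with results copied out at the end:
//
//     vkCmdResetQueryPool(cmd, query_pool, 0 /*firstQuery*/, 2 /*queryCount*/);
//     vkCmdBeginQuery(cmd, query_pool, 0, 0 /*flags*/);
//     // ... draws ...
//     vkCmdEndQuery(cmd, query_pool, 0);
//     vkCmdCopyQueryPoolResults(cmd, query_pool, 0, 1, results_buffer, 0 /*dstOffset*/,
//                               sizeof(uint64_t) /*stride*/, VK_QUERY_RESULT_64_BIT);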

void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            VkImageView view = pCreateInfo->pAttachments[i];
            auto view_state = GetImageViewState(view);
            if (!view_state) {
                continue;
            }
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}

void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
    }
}
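
// Worked example (a sketch, not layer code): for a render pass with three
// subpasses and dependencies at indices {0: 0->1, 1: 1->2, 2: 1->1},
// RecordRenderPassDAG above produces
//
//     subpass_to_node[0] = {pass: 0, prev: {},  next: {1}}
//     subpass_to_node[1] = {pass: 1, prev: {0}, next: {2}}
//     subpass_to_node[2] = {pass: 2, prev: {1}, next: {}}
//     self_dependencies  = {{}, {2}, {}}  // dependency index 2 is the 1->1 edge
//
// Dependencies to or from VK_SUBPASS_EXTERNAL are deliberately excluded from the DAG.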

static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
    if (index == VK_ATTACHMENT_UNUSED) return;

    if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
}

void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
                                                         VkRenderPass *pRenderPass) {
    render_pass->renderPass = *pRenderPass;
    auto create_info = render_pass->createInfo.ptr();

    RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());

    for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);

            // Resolve attachments are considered to be written
            if (subpass.pResolveAttachments) {
                MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
            }
        }
        if (subpass.pDepthStencilAttachment) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
        }
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
        }
    }

    // Even though render_pass is an rvalue-ref parameter, we still must move so that move assignment is invoked.
    renderPassMap[*pRenderPass] = std::move(render_pass);
}

// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in the Google style guide, but intentionally forces the
// caller to move or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of
// shared_ptr copy construction or assignment.
void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
}

void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                     const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                     VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                             VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        // This is a shallow copy as that is all that is needed for now
        cb_state->activeRenderPassBeginInfo = *pRenderPassBegin;
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer);
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(render_pass_state->cb_bindings,
                                VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
                                cb_state);

        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeSubpass++;
    cb_state->activeSubpassContents = contents;
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                             const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be
                // moved from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non-const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}
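
// Illustrative app-side sketch (not part of the layer; 'primary_cmd', 'sec0',
// and 'sec1' are hypothetical). The hook above merges secondary command buffer
// state into the primary when the app records:
//
//     VkCommandBuffer secondary_cmds[] = {sec0, sec1};
//     vkCmdExecuteCommands(primary_cmd, 2, secondary_cmds);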

void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}

void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range = MemRange();
        mem_info->p_driver_data = nullptr;
    }
}

void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
                                       image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
            AddAliasingImage(image_state);
        }
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
                                                           VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkBindImageMemoryInfo bindInfo = {};
    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindInfo.image = image;
    bindInfo.memory = mem;
    bindInfo.memoryOffset = memoryOffset;
    UpdateBindImageMemoryState(bindInfo);
}

void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}
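
// Illustrative app-side sketch (not part of the layer; 'device', 'image', and
// 'swapchain' are hypothetical). The swapchain path in UpdateBindImageMemoryState
// above is driven by a bind like this, which attaches an image to swapchain
// image 0 instead of to a VkDeviceMemory allocation:
//
//     VkBindImageMemorySwapchainInfoKHR swap_info = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR};
//     swap_info.swapchain = swapchain;
//     swap_info.imageIndex = 0;
//     VkBindImageMemoryInfo bind_info = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, &swap_info};
//     bind_info.image = image;  // memory stays VK_NULL_HANDLE for the swapchain case
//     vkBindImageMemory2(device, 1, &bind_info);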

void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
    auto event_state = GetEventState(event);
    if (event_state) {
        event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
    }
}

void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
        semaphore_state->scope = kSyncScopeExternalPermanent;
    }
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif

void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}

void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
                                                    VkFenceImportFlagsKHR flags) {
    FENCE_STATE *fence_node = GetFenceState(fence);
    if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
            fence_node->scope == kSyncScopeInternal) {
            fence_node->scope = kSyncScopeExternalTemporary;
        } else {
            fence_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
    FENCE_STATE *fence_state = GetFenceState(fence);
    if (fence_state) {
        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
            // Export with reference transference becomes external
            fence_state->scope = kSyncScopeExternalPermanent;
        } else if (fence_state->scope == kSyncScopeInternal) {
            // Export with copy transference has a side effect of resetting the fence
            fence_state->state = FENCE_UNSIGNALED;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}

void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
    if (VK_SUCCESS != result) return;
    eventMap[*pEvent].write_in_use = 0;
    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}

void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
    return;
}

void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}

void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}

void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed -- but if the app does that,
        // it's confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP
    // (and its semaphore waits) /never/ participate in any completion proof.
}

void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
    if (pCreateInfos) {
        for (uint32_t i = 0; i < swapchainCount; i++) {
            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
        }
    }
}

void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}

void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
    if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
            phys_device_state.phys_device = pPhysicalDevices[i];
            // Init actual features for each physical device
            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
        }
    }
}

// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}
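
// Illustrative app-side sketch (not part of the layer; 'gpu' is hypothetical).
// The QUERY_COUNT / QUERY_DETAILS distinction above mirrors the standard
// two-call enumeration pattern, first with a null data pointer, then again for
// the actual properties:
//
//     uint32_t count = 0;
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);       // -> QUERY_COUNT
//     std::vector<VkQueueFamilyProperties> props(count);
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, props.data());  // -> QUERY_DETAILS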

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!surface) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->destroyed = true;
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_METAL_EXT
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_METAL_EXT

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    physical_device_state->features2.pNext = nullptr;
    physical_device_state->features2.features = *pFeatures;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
    if (*pSurfaceFormatCount) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
        }
        if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
            physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}
4323
4324void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4325 const VkDebugUtilsLabelEXT *pLabelInfo) {
4326 BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4327}
4328
4329void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
4330 EndCmdDebugUtilsLabel(report_data, commandBuffer);
4331}
4332
4333void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
4334 const VkDebugUtilsLabelEXT *pLabelInfo) {
4335 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
4336
4337 // Squirrel away an easily accessible copy.
4338 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4339 cb_state->debug_label = LoggingLabel(pLabelInfo);
4340}
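
// Illustrative sketch (not layer code): the label these hooks record is supplied by the
// application roughly as follows; the struct contents are hypothetical:
//     VkDebugUtilsLabelEXT label = {};
//     label.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
//     label.pLabelName = "shadow pass";
//     label.color[0] = 1.0f;  // optional RGBA hint for tools
//     vkCmdInsertDebugUtilsLabelEXT(cmd_buffer, &label);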

void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}
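
// Illustrative sketch (not layer code): the per-device state above is populated when an application
// enumerates device groups with the usual two-call idiom; names are hypothetical:
//     uint32_t group_count = 0;
//     vkEnumeratePhysicalDeviceGroups(instance, &group_count, nullptr);
//     std::vector<VkPhysicalDeviceGroupProperties> groups(group_count, {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES});
//     vkEnumeratePhysicalDeviceGroups(instance, &group_count, groups.data());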

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
                                                                                              uint32_t queueFamilyIndex,
                                                                                              uint32_t *pCounterCount,
                                                                                              VkPerformanceCounterKHR *pCounters) {
    if (NULL == pCounters) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);

    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
    queueFamilyCounters->counters.resize(*pCounterCount);
    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];

    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
}
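
// Illustrative sketch (not layer code): the per-queue-family counters copied above are enumerated
// by the application like so (hypothetical names; descriptions can be fetched in the same call):
//     uint32_t counter_count = 0;
//     vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu, family_index, &counter_count, nullptr, nullptr);
//     std::vector<VkPerformanceCounterKHR> counters(counter_count, {VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR});
//     vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu, family_index, &counter_count, counters.data(),
//                                                                     nullptr);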

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}

void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}

bool ValidationStateTracker::PreCallValidateReleaseProfilingLockKHR(VkDevice device) const {
    bool skip = false;

    if (!performance_lock_acquired) {
        skip |= log_msg(
            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
            "VUID-vkReleaseProfilingLockKHR-device-03235",
            "The profiling lock of device must have been held via a previous successful call to vkAcquireProfilingLockKHR.");
    }

    return skip;
}

void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}
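
// Illustrative sketch (not layer code): the acquire/release pairing tracked by the hooks above,
// with hypothetical names:
//     VkAcquireProfilingLockInfoKHR lock_info = {};
//     lock_info.sType = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
//     lock_info.timeout = UINT64_MAX;
//     vkAcquireProfilingLockKHR(device, &lock_info);  // performance_lock_acquired = true on success
//     // ... record and submit performance-query work ...
//     vkReleaseProfilingLockKHR(device);              // releasing without holding triggers VUID-...-03235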

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}
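
// Illustrative sketch (not layer code): the raw pData blob decoded above is laid out by the
// application to match its template entries' offset/stride; names and layout here are hypothetical:
//     struct PackedUpdate { VkDescriptorBufferInfo buffer_info; };  // matches a single template entry
//     PackedUpdate data = {{uniform_buffer, 0, VK_WHOLE_SIZE}};
//     vkUpdateDescriptorSetWithTemplate(device, descriptor_set, update_template, &data);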

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
    }
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    QueryObjectPass query_pass_obj{query_obj, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        auto query_it = queryToStateMap.find(query_obj);
        if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_pass_obj.perf_pass = passIndex;
                auto query_perf_it = queryPassToStateMap.find(query_pass_obj);
                if (query_perf_it != queryPassToStateMap.end()) query_perf_it->second = QUERYSTATE_RESET;
            }
        }
    }
}
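
// Illustrative sketch (not layer code): the host-side reset recorded above requires the
// hostQueryReset feature (core in Vulkan 1.2, or VK_EXT_host_query_reset); names are hypothetical:
//     vkResetQueryPool(device, query_pool, /*firstQuery=*/0, /*queryCount=*/4);
//     // Each affected (pool, query) entry transitions to QUERYSTATE_RESET without any command buffer work.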

void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
    }
}

// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
                                                                       variable_count, this);
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}
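
// Illustrative sketch (not layer code): the allocation whose bookkeeping is mirrored above;
// names are hypothetical:
//     VkDescriptorSetAllocateInfo alloc_info = {};
//     alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
//     alloc_info.descriptorPool = pool;       // availableSets is decremented per allocated set
//     alloc_info.descriptorSetCount = 1;
//     alloc_info.pSetLayouts = &set_layout;   // per-type descriptor counts are debited from the pool
//     VkDescriptorSet set = VK_NULL_HANDLE;
//     vkAllocateDescriptorSets(device, &alloc_info, &set);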

// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateDrawState(cb_state, bind_point);
    cb_state->hasDispatchCmd = true;
}

// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateStateCmdDrawDispatchType(cb_state, bind_point);
    cb_state->hasDrawCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                                   uint32_t firstVertex, uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                                          uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                           uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                        VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                        uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}
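
// Illustrative sketch (not layer code): the count-variant draw recorded above binds both the
// parameter buffer and the count buffer to the command buffer; names are hypothetical:
//     vkCmdDrawIndirectCount(cmd_buffer, args_buffer, /*offset=*/0, count_buffer, /*countBufferOffset=*/0,
//                            /*maxDrawCount=*/64, /*stride=*/sizeof(VkDrawIndirectCommand));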

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                      VkDeviceSize offset, VkBuffer countBuffer,
                                                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                      uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}

void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}
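
// Illustrative note (not layer code): spv::MagicNumber is the standard SPIR-V magic word
// 0x07230203, stored as the first word of every valid module, so the check above is equivalent to:
//     bool looks_like_spirv = (pCreateInfo->codeSize >= sizeof(uint32_t)) && (pCreateInfo->pCode[0] == 0x07230203u);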

void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(report_data, module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline
    for (auto use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
        auto &reqs = pipeline->active_slots[slot][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
    }
}

void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}
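
// Illustrative sketch (not layer code): the sizing rule above allocates shadow storage covering
// the furthest byte any range can touch. For two hypothetical ranges:
//     VkPushConstantRange ranges[] = {{VK_SHADER_STAGE_VERTEX_BIT, 0, 16},
//                                     {VK_SHADER_STAGE_FRAGMENT_BIT, 32, 8}};
//     // size_needed = max(0 + 16, 32 + 8) = 40 bytes of zero-initialized shadow data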

void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;                    // to be set later
            image_ci.flags = VK_IMAGE_CREATE_ALIAS_BIT;  // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
        }
    }

    if (*pSwapchainImageCount) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
        }
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}
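
// Illustrative sketch (not layer code): the two-call retrieval that drives the bookkeeping above;
// names are hypothetical:
//     uint32_t image_count = 0;
//     vkGetSwapchainImagesKHR(device, swapchain, &image_count, nullptr);        // records QUERY_COUNT
//     std::vector<VkImage> images(image_count);
//     vkGetSwapchainImagesKHR(device, swapchain, &image_count, images.data());  // records QUERY_DETAILS + per-image state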