/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

// Allow use of STL min and max functions in Windows
#define NOMINMAX

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

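// Capture Android hardware buffer (AHB) information from the image create info's pNext chain: flag images
// that may be backed by an imported AHB, and record any external (non-Vulkan) format for later use.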
void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->imported_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
    }
}

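// When a sampler Y'CbCr conversion is created with a non-zero Android external format, remember the
// conversion-handle-to-format mapping for the lifetime of the conversion object.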
void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

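// Build tracking state for a successfully created image, including Android external-format data and any
// swapchain it was created from. Memory requirements are pre-fetched here so that bind-time validation has
// them even if the app never queries them itself.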
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    std::unique_ptr<IMAGE_STATE> is_node(new IMAGE_STATE(*pImage, pCreateInfo));
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    bool pre_fetch_memory_reqs = true;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    if (is_node->has_ahb_format) {
        // Do not fetch requirements for external memory images
        pre_fetch_memory_reqs = false;
    }
#endif
    // Record the memory requirements in case they won't be queried
    if (pre_fetch_memory_reqs) {
        DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
    }
    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}

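// Tear down an image's tracking state: invalidate command buffers that reference it, release its memory
// range and binding records, unlink it from any swapchain and aliased images, then drop it from imageMap.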
void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveImageMemoryRange(image, mem_info);
        }
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    std::unique_ptr<BUFFER_STATE> buffer_state(new BUFFER_STATE(*pBuffer, pCreateInfo));

    // Pre-fetch the memory requirements in case the app never queries them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    bufferViewMap[*pView] = std::unique_ptr<BUFFER_VIEW_STATE>(new BUFFER_VIEW_STATE(*pView, pCreateInfo));
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageState(pCreateInfo->image);
    imageViewMap[*pView] = std::unique_ptr<IMAGE_VIEW_STATE>(new IMAGE_VIEW_STATE(image_state, *pView, pCreateInfo));
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveBufferMemoryRange(buffer, mem_info);
        }
    }
    ClearMemoryObjectBindings(obj_struct);
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

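// Images created with VK_IMAGE_CREATE_ALIAS_BIT may legally share memory (or a swapchain image slot).
// Cross-link this image with every compatible image already bound to the same memory or swapchain so the
// tracker can treat them as aliases of one another.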
void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    if (!(image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT)) return;
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->create_from_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->create_from_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->bound_images;
        }
    } else {
        auto mem_state = GetDevMemState(image_state->binding.mem);
        if (mem_state) {
            bound_images = &mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // A one-way clear is sufficient here: bound_images already contains both sides of every cross-reference,
    // so clearing each image's aliasing_images set removes every link without a second pass.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

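// Create tracking state for a new VkDeviceMemory allocation, capturing any dedicated-allocation and
// export-handle-type information from the allocate info's pNext chain.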
void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    auto *mem_info = new DEVICE_MEMORY_STATE(object, mem, pAllocateInfo);
    memObjMap[mem] = unique_ptr<DEVICE_MEMORY_STATE>(mem_info);

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    auto inserted = cb_node->object_bindings.emplace(sampler_state->sampler, kVulkanObjectTypeSampler);
    if (inserted.second) {
        // Only need to complete the cross-reference if this is a new item
        sampler_state->cb_bindings.insert(cb_node);
    }
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        auto image_inserted = cb_node->object_bindings.emplace(image_state->image, kVulkanObjectTypeImage);
        if (image_inserted.second) {
            // Only need to continue if this is a new item (the rest of the work would have been done previously)
            image_state->cb_bindings.insert(cb_node);
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
                if (pMemInfo) {
                    // Now update CBInfo's Mem reference list
                    auto mem_inserted = cb_node->memObjs.insert(mem_binding);
                    if (mem_inserted.second) {
                        // Only need to complete the cross-reference if this is a new item
                        pMemInfo->cb_bindings.insert(cb_node);
                    }
                }
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    auto inserted = cb_node->object_bindings.emplace(view_state->image_view, kVulkanObjectTypeImageView);
    if (inserted.second) {
        // Only need to continue if this is a new item
        view_state->cb_bindings.insert(cb_node);
        auto image_state = GetImageState(view_state->create_info.image);
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    auto buffer_inserted = cb_node->object_bindings.emplace(buffer_state->buffer, kVulkanObjectTypeBuffer);
    if (buffer_inserted.second) {
        // Only need to continue if this is a new item
        buffer_state->cb_bindings.insert(cb_node);
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                auto inserted = cb_node->memObjs.insert(mem_binding);
                if (inserted.second) {
                    // Only need to complete the cross-reference if this is a new item
                    pMemInfo->cb_bindings.insert(cb_node);
                }
            }
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    auto inserted = cb_node->object_bindings.emplace(view_state->buffer_view, kVulkanObjectTypeBufferView);
    if (inserted.second) {
        // Only need to complete the cross-reference if this is a new item
        view_state->cb_bindings.insert(cb_node);
        auto buffer_state = GetBufferState(view_state->create_info.buffer);
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    auto as_inserted = cb_node->object_bindings.emplace(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV);
    if (as_inserted.second) {
        // Only need to complete the cross-reference if this is a new item
        as_state->cb_bindings.insert(cb_node);
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                auto mem_inserted = cb_node->memObjs.insert(mem_binding);
                if (mem_inserted.second) {
                    // Only need to complete the cross-reference if this is a new item
                    pMemInfo->cb_bindings.insert(cb_node);
                }
            }
        }
    }
}

// For every mem obj bound to particular CB, free bindings related to that CB
void ValidationStateTracker::ClearCmdBufAndMemReferences(CMD_BUFFER_STATE *cb_node) {
    if (cb_node) {
        if (cb_node->memObjs.size() > 0) {
            for (auto mem : cb_node->memObjs) {
                DEVICE_MEMORY_STATE *pInfo = GetDevMemState(mem);
                if (pInfo) {
                    pInfo->cb_bindings.erase(cb_node);
                }
            }
            cb_node->memObjs.clear();
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);
    mem_binding->binding.mem = mem;
    mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
    mem_binding->binding.offset = memory_offset;
    mem_binding->binding.size = mem_binding->requirements.size;

    if (mem != VK_NULL_HANDLE) {
        DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
        if (mem_info) {
            mem_info->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
        }
    }
}

// For the NULL-memory case, clear any previous binding. Otherwise:
//  - Make sure the given object is in its object map
//  - If a previous binding existed, update the binding
//  - Add a reference from the objectInfo to the memoryInfo
//  - Add a reference off of the object's binding info
// Returns VK_TRUE if the addition is successful, VK_FALSE otherwise
bool ValidationStateTracker::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
    bool skip = VK_FALSE;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (binding.mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
            if (mem_info) {
                mem_info->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}

const RENDER_PASS_STATE *ValidationStateTracker::GetRenderPassState(VkRenderPass renderpass) const {
    auto it = renderPassMap.find(renderpass);
    if (it == renderPassMap.end()) {
        return nullptr;
    }
    return it->second.get();
}

RENDER_PASS_STATE *ValidationStateTracker::GetRenderPassState(VkRenderPass renderpass) {
    auto it = renderPassMap.find(renderpass);
    if (it == renderPassMap.end()) {
        return nullptr;
    }
    return it->second.get();
}

std::shared_ptr<RENDER_PASS_STATE> ValidationStateTracker::GetRenderPassStateSharedPtr(VkRenderPass renderpass) {
    auto it = renderPassMap.find(renderpass);
    if (it == renderPassMap.end()) {
        return nullptr;
    }
    return it->second;
}

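// Called when recording a draw/dispatch. Re-binds each descriptor set used by the bound pipeline to the
// command buffer, but skips sets whose contents, image layouts, and binding requirements are unchanged
// since the last validation (tracked through the per-set validated_set cache below).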
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            if (!descriptor_set->IsPushDescriptor()) {
                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
                // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

                if (reduced_map.IsManyDescriptors()) {
                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
                }

                // We can skip updating the state if "nothing" has changed since the last validation.
                // See CoreChecks::ValidateCmdBufDrawState for more details.
                bool need_update =
                    !reduced_map.IsManyDescriptors() ||
                    // Update if descriptor set (or contents) has changed
                    state.per_set[setIndex].validated_set != descriptor_set ||
                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                    (!disabled.image_layout_validation &&
                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count) ||
                    // Update if the previously validated bindingReqMap doesn't include the new bindingReqMap
                    !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                   state.per_set[setIndex].validated_set_binding_req_map.end(), set_binding_pair.second.begin(),
                                   set_binding_pair.second.end());

                if (need_update) {
                    // Bind this set and its active descriptor resources to the command buffer
                    descriptor_set->UpdateDrawState(this, cb_state, binding_req_map);

                    state.per_set[setIndex].validated_set = descriptor_set;
                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                    if (reduced_map.IsManyDescriptors()) {
                        // Check whether old == new before assigning, the equality check is much cheaper than
                        // freeing and reallocating the map.
                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                        }
                    } else {
                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                    }
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in a mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
//  Add object_binding to cmd buffer
//  Add cb_binding to object
void ValidationStateTracker::AddCommandBufferBinding(std::unordered_set<CMD_BUFFER_STATE *> *cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return;
    }
    cb_bindings->insert(cb_node);
    cb_node->object_bindings.insert(obj);
}

// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        memset(&pCB->activeRenderPassBeginInfo, 0, sizeof(pCB->activeRenderPassBeginInfo));
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->queryToStateMap.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->eventToStageMap.clear();
        pCB->cb_vertex_buffer_binding_info.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        ClearCmdBufAndMemReferences(pCB);
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

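// On successful device creation, snapshot everything later validation will need: the enabled feature set
// (from pEnabledFeatures or the pNext feature structs), queue family properties, physical-device limits,
// and per-extension properties.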
void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);
        }
    }

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously set them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

    const auto *descriptor_indexing_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pCreateInfo->pNext);
    if (descriptor_indexing_features) {
        state_tracker->enabled_features.descriptor_indexing = *descriptor_indexing_features;
    }

    const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeaturesKHR>(pCreateInfo->pNext);
    if (eight_bit_storage_features) {
        state_tracker->enabled_features.eight_bit_storage = *eight_bit_storage_features;
    }

    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceFloat16Int8FeaturesKHR>(pCreateInfo->pNext);
    if (float16_int8_features) {
        state_tracker->enabled_features.float16_int8 = *float16_int8_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *uniform_buffer_standard_layout_features =
        lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(pCreateInfo->pNext);
    if (uniform_buffer_standard_layout_features) {
        state_tracker->enabled_features.uniform_buffer_standard_layout = *uniform_buffer_standard_layout_features;
    }

    const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(pCreateInfo->pNext);
    if (scalar_block_layout_features) {
        state_tracker->enabled_features.scalar_block_layout_features = *scalar_block_layout_features;
    }

    const auto *buffer_address = lvl_find_in_chain<VkPhysicalDeviceBufferAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_address) {
        state_tracker->enabled_features.buffer_address = *buffer_address;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    const auto *float_controls_features = lvl_find_in_chain<VkPhysicalDeviceFloatControlsPropertiesKHR>(pCreateInfo->pNext);
    if (float_controls_features) {
        state_tracker->enabled_features.float_controls = *float_controls_features;
    }

    const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeaturesEXT>(pCreateInfo->pNext);
    if (host_query_reset_features) {
        state_tracker->enabled_features.host_query_reset_features = *host_query_reset_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *imageless_framebuffer_features =
        lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeaturesKHR>(pCreateInfo->pNext);
    if (imageless_framebuffer_features) {
        state_tracker->enabled_features.imageless_framebuffer_features = *imageless_framebuffer_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    // Store physical device properties and physical device mem limits into CoreChecks structs
    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);

    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &phys_dev_props->descriptor_indexing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &phys_dev_props->depth_stencil_resolve_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
                                                                               state_tracker->cooperative_matrix_properties.data());
    }
    if (state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_ext_props.subgroup_props = subgroup_prop;
    }

    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            state_tracker->queue_family_index_map.insert(
                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    pipelineMap.clear();
    renderPassMap.clear();

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}

// Loop through bound objects and increment their in_use counts.
void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
    for (auto obj : cb_node->object_bindings) {
        auto base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_add(1);
        }
    }
}

// Track which resources are in-flight by atomically incrementing their "in_use" count
void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
    cb_node->submitCount++;
    cb_node->in_use.fetch_add(1);

    // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
    IncrementBoundObjects(cb_node);
    // TODO : We should be able to remove the NULL look-up checks from the code below as long as
    //  all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
    //  should then be flagged prior to calling this function
    for (auto event : cb_node->writeEventsBeforeWait) {
        auto event_state = GetEventState(event);
        if (event_state) event_state->write_in_use++;
    }
}

// Decrement in-use count for objects bound to command buffer
void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
    BASE_NODE *base_obj = nullptr;
    for (auto obj : cb_node->object_bindings) {
        base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_sub(1);
        }
    }
}

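// Roll the queue's recorded submissions forward to sequence number 'seq': release in-use counts on
// semaphores, command buffers, and their bound objects, retire fences, and propagate query and event
// state. Other queues that were waited on are then retired recursively up to the awaited sequence.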
1241void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq, bool switch_finished_queries) {
1242 std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;
1243
1244 // Roll this queue forward, one submission at a time.
1245 while (pQueue->seq < seq) {
1246 auto &submission = pQueue->submissions.front();
1247
1248 for (auto &wait : submission.waitSemaphores) {
1249 auto pSemaphore = GetSemaphoreState(wait.semaphore);
1250 if (pSemaphore) {
1251 pSemaphore->in_use.fetch_sub(1);
1252 }
1253 auto &lastSeq = otherQueueSeqs[wait.queue];
1254 lastSeq = std::max(lastSeq, wait.seq);
1255 }
1256
1257 for (auto &semaphore : submission.signalSemaphores) {
1258 auto pSemaphore = GetSemaphoreState(semaphore);
1259 if (pSemaphore) {
1260 pSemaphore->in_use.fetch_sub(1);
1261 }
1262 }
1263
1264 for (auto &semaphore : submission.externalSemaphores) {
1265 auto pSemaphore = GetSemaphoreState(semaphore);
1266 if (pSemaphore) {
1267 pSemaphore->in_use.fetch_sub(1);
1268 }
1269 }
1270
1271 for (auto cb : submission.cbs) {
1272 auto cb_node = GetCBState(cb);
1273 if (!cb_node) {
1274 continue;
1275 }
1276 // First perform decrement on general case bound objects
1277 DecrementBoundResources(cb_node);
1278 for (auto event : cb_node->writeEventsBeforeWait) {
1279 auto eventNode = eventMap.find(event);
1280 if (eventNode != eventMap.end()) {
1281 eventNode->second.write_in_use--;
1282 }
1283 }
1284 for (auto queryStatePair : cb_node->queryToStateMap) {
1285 const QueryState newState =
1286 ((queryStatePair.second == QUERYSTATE_ENDED && switch_finished_queries) ? QUERYSTATE_AVAILABLE
1287 : queryStatePair.second);
1288 pQueue->queryToStateMap[queryStatePair.first] = newState;
1289 queryToStateMap[queryStatePair.first] = newState;
1290 }
1291 for (auto eventStagePair : cb_node->eventToStageMap) {
1292 eventMap[eventStagePair.first].stageMask = eventStagePair.second;
1293 }
1294
1295 cb_node->in_use.fetch_sub(1);
1296 }
1297
1298 auto pFence = GetFenceState(submission.fence);
1299 if (pFence && pFence->scope == kSyncScopeInternal) {
1300 pFence->state = FENCE_RETIRED;
1301 }
1302
1303 pQueue->submissions.pop_front();
1304 pQueue->seq++;
1305 }
1306
1307 // Roll other queues forward to the highest seq we saw a wait for
1308 for (auto qs : otherQueueSeqs) {
1309 RetireWorkOnQueue(GetQueueState(qs.first), qs.second, switch_finished_queries);
1310 }
1311}
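
// Worked example (hypothetical state): pQueue->seq == 3 with two pending submissions,
// where the second submission waited on a semaphore signaled by queue q2 at seq 7.
// RetireWorkOnQueue(pQueue, 5, ...) then:
//   - pops both submissions, advancing pQueue->seq from 3 to 5,
//   - records {q2: 7} in otherQueueSeqs while releasing the semaphore's in_use count,
//   - finally recurses to retire q2's work up through seq 7.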
1312
1313// Submit a fence to a queue, delimiting previous fences and previous untracked
1314// work by it.
1315static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1316 pFence->state = FENCE_INFLIGHT;
1317 pFence->signaler.first = pQueue->queue;
1318 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1319}
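
// Example of the signaler arithmetic, assuming pQueue->seq == 10, two submissions
// still pending, and a vkQueueSubmit call adding three more with this fence:
//   SubmitFence(pQueue, pFence, 3);
//   // pFence->signaler == {pQueue->queue, 10 + 2 + 3} == {queue, 15},
//   // i.e. the fence retires once the queue's seq reaches 15.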
1320
1321void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
1322 VkFence fence, VkResult result) {
1323 uint64_t early_retire_seq = 0;
1324 auto pQueue = GetQueueState(queue);
1325 auto pFence = GetFenceState(fence);
1326
1327 if (pFence) {
1328 if (pFence->scope == kSyncScopeInternal) {
1329 // Mark fence in use
1330 SubmitFence(pQueue, pFence, std::max(1u, submitCount));
1331 if (!submitCount) {
1332 // If no submissions, but just dropping a fence on the end of the queue,
1333 // record an empty submission with just the fence, so we can determine
1334 // its completion.
1335 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
1336 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence);
1337 }
1338 } else {
1339 // Retire work up until this fence early; we will not see the wait that corresponds to this signal
1340 early_retire_seq = pQueue->seq + pQueue->submissions.size();
1341 }
1342 }
1343
1344 // Now process each individual submit
1345 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1346 std::vector<VkCommandBuffer> cbs;
1347 const VkSubmitInfo *submit = &pSubmits[submit_idx];
1348 vector<SEMAPHORE_WAIT> semaphore_waits;
1349 vector<VkSemaphore> semaphore_signals;
1350 vector<VkSemaphore> semaphore_externals;
1351 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
1352 VkSemaphore semaphore = submit->pWaitSemaphores[i];
1353 auto pSemaphore = GetSemaphoreState(semaphore);
1354 if (pSemaphore) {
1355 if (pSemaphore->scope == kSyncScopeInternal) {
1356 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
1357 semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
1358 pSemaphore->in_use.fetch_add(1);
1359 }
1360 pSemaphore->signaler.first = VK_NULL_HANDLE;
1361 pSemaphore->signaled = false;
1362 } else {
1363 semaphore_externals.push_back(semaphore);
1364 pSemaphore->in_use.fetch_add(1);
1365 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
1366 pSemaphore->scope = kSyncScopeInternal;
1367 }
1368 }
1369 }
1370 }
1371 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
1372 VkSemaphore semaphore = submit->pSignalSemaphores[i];
1373 auto pSemaphore = GetSemaphoreState(semaphore);
1374 if (pSemaphore) {
1375 if (pSemaphore->scope == kSyncScopeInternal) {
1376 pSemaphore->signaler.first = queue;
1377 pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
1378 pSemaphore->signaled = true;
1379 pSemaphore->in_use.fetch_add(1);
1380 semaphore_signals.push_back(semaphore);
1381 } else {
1382 // Retire work up until this submit early; we will not see the wait that corresponds to this signal
1383 early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
1384 }
1385 }
1386 }
1387 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
1388 auto cb_node = GetCBState(submit->pCommandBuffers[i]);
1389 if (cb_node) {
1390 cbs.push_back(submit->pCommandBuffers[i]);
1391 for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
1392 cbs.push_back(secondaryCmdBuffer->commandBuffer);
1393 IncrementResources(secondaryCmdBuffer);
1394 }
1395 IncrementResources(cb_node);
1396 }
1397 }
1398 pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
1399 submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE);
1400 }
1401
1402 if (early_retire_seq) {
1403 RetireWorkOnQueue(pQueue, early_retire_seq, true);
1404 }
1405}
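
// Illustrative app-side call and its effect here (hypothetical handles; not from this file):
//
//   VkPipelineStageFlags stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
//   VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO};
//   si.waitSemaphoreCount = 1;   si.pWaitSemaphores = &semA;  si.pWaitDstStageMask = &stage;
//   si.commandBufferCount = 1;   si.pCommandBuffers = &cb;
//   si.signalSemaphoreCount = 1; si.pSignalSemaphores = &semB;
//   vkQueueSubmit(queue, 1, &si, fence);
//
// One submission is recorded on the queue: semA (internally scoped and previously
// signaled) becomes a SEMAPHORE_WAIT on its recorded signaler, semB's signaler is set
// to this queue's next seq, cb and its linked secondaries get IncrementResources, and
// the fence rides on this final submit.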
1406
1407void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
1408 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
1409 VkResult result) {
1410 if (VK_SUCCESS == result) {
1411 AddMemObjInfo(device, *pMemory, pAllocateInfo);
1412 }
1413 return;
1414}
1415
1416void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
1417 if (!mem) return;
1418 DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
1419 const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
1420
1421 // Clear mem binding for any bound objects
1422 for (const auto &obj : mem_info->obj_bindings) {
1423 BINDABLE *bindable_state = nullptr;
1424 switch (obj.type) {
1425 case kVulkanObjectTypeImage:
1426 bindable_state = GetImageState(obj.Cast<VkImage>());
1427 break;
1428 case kVulkanObjectTypeBuffer:
1429 bindable_state = GetBufferState(obj.Cast<VkBuffer>());
1430 break;
1431 case kVulkanObjectTypeAccelerationStructureNV:
1432 bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
1433 break;
1434
1435 default:
1436 // Should only have acceleration structure, buffer, or image objects bound to memory
1437 assert(0);
1438 }
1439
1440 if (bindable_state) {
1441 bindable_state->binding.mem = MEMORY_UNBOUND;
1442 bindable_state->UpdateBoundMemorySet();
1443 }
1444 }
1445 // Any bound cmd buffers are now invalid
1446 InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
1447 RemoveAliasingImages(mem_info->bound_images);
1448 memObjMap.erase(mem);
1449}
1450
1451void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
1452 VkFence fence, VkResult result) {
1453 if (result != VK_SUCCESS) return;
1454 uint64_t early_retire_seq = 0;
1455 auto pFence = GetFenceState(fence);
1456 auto pQueue = GetQueueState(queue);
1457
1458 if (pFence) {
1459 if (pFence->scope == kSyncScopeInternal) {
1460 SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
1461 if (!bindInfoCount) {
1462 // No work to do, just dropping a fence in the queue by itself.
1463 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
1464 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence);
1465 }
1466 } else {
1467 // Retire work up until this fence early; we will not see the wait that corresponds to this signal
1468 early_retire_seq = pQueue->seq + pQueue->submissions.size();
1469 }
1470 }
1471
1472 for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
1473 const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
1474 // Track objects tied to memory
1475 for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
1476 for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
1477 auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
1478 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
1479 VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
1480 }
1481 }
1482 for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
1483 for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
1484 auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
1485 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
1486 VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
1487 }
1488 }
1489 for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
1490 for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
1491 auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
1492 // TODO: This size is broken for non-opaque bindings; update to comprehend the full sparse binding data (the *4 below assumes 4 bytes per texel)
1493 VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
1494 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
1495 VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
1496 }
1497 }
1498
1499 std::vector<SEMAPHORE_WAIT> semaphore_waits;
1500 std::vector<VkSemaphore> semaphore_signals;
1501 std::vector<VkSemaphore> semaphore_externals;
1502 for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
1503 VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
1504 auto pSemaphore = GetSemaphoreState(semaphore);
1505 if (pSemaphore) {
1506 if (pSemaphore->scope == kSyncScopeInternal) {
1507 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
1508 semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
1509 pSemaphore->in_use.fetch_add(1);
1510 }
1511 pSemaphore->signaler.first = VK_NULL_HANDLE;
1512 pSemaphore->signaled = false;
1513 } else {
1514 semaphore_externals.push_back(semaphore);
1515 pSemaphore->in_use.fetch_add(1);
1516 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
1517 pSemaphore->scope = kSyncScopeInternal;
1518 }
1519 }
1520 }
1521 }
1522 for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
1523 VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
1524 auto pSemaphore = GetSemaphoreState(semaphore);
1525 if (pSemaphore) {
1526 if (pSemaphore->scope == kSyncScopeInternal) {
1527 pSemaphore->signaler.first = queue;
1528 pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
1529 pSemaphore->signaled = true;
1530 pSemaphore->in_use.fetch_add(1);
1531 semaphore_signals.push_back(semaphore);
1532 } else {
1533 // Retire work up until this submit early; we will not see the wait that corresponds to this signal
1534 early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
1535 }
1536 }
1537 }
1538
1539 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
1540 bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE);
1541 }
1542
1543 if (early_retire_seq) {
1544 RetireWorkOnQueue(pQueue, early_retire_seq, true);
1545 }
1546}
1547
1548void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
1549 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
1550 VkResult result) {
1551 if (VK_SUCCESS != result) return;
1552 std::unique_ptr<SEMAPHORE_STATE> semaphore_state(new SEMAPHORE_STATE{});
1553 semaphore_state->signaler.first = VK_NULL_HANDLE;
1554 semaphore_state->signaler.second = 0;
1555 semaphore_state->signaled = false;
1556 semaphore_state->scope = kSyncScopeInternal;
1557 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
1558}
1559
1560void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
1561 VkSemaphoreImportFlagsKHR flags) {
1562 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
1563 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
1564 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
1565 sema_node->scope == kSyncScopeInternal) {
1566 sema_node->scope = kSyncScopeExternalTemporary;
1567 } else {
1568 sema_node->scope = kSyncScopeExternalPermanent;
1569 }
1570 }
1571}
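
// Example: importing a sync-fd payload, or importing with
// VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR, into an internally scoped semaphore:
//   RecordImportSemaphoreState(sem, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR, 0);
//   // scope: kSyncScopeInternal -> kSyncScopeExternalTemporary (reverts to internal after
//   // one wait; see the kSyncScopeExternalTemporary handling in the submit paths above)
// Any other import becomes permanent (kSyncScopeExternalPermanent), after which this
// function is a no-op.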
1572
1573void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
1574 auto mem_info = GetDevMemState(mem);
1575 if (mem_info) {
1576 mem_info->mapped_range.offset = offset;
1577 mem_info->mapped_range.size = size;
1578 mem_info->p_driver_data = *ppData;
1579 }
1580}
1581
1582void ValidationStateTracker::RetireFence(VkFence fence) {
1583 auto pFence = GetFenceState(fence);
1584 if (pFence && pFence->scope == kSyncScopeInternal) {
1585 if (pFence->signaler.first != VK_NULL_HANDLE) {
1586 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
1587 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second, true);
1588 } else {
1589 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
1590 // the fence as retired.
1591 pFence->state = FENCE_RETIRED;
1592 }
1593 }
1594}
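
// Example: a fence whose signaler was recorded as {q, 15} by SubmitFence retires all of
// q's work through seq 15 here; a fence signaled by a WSI operation (e.g. a
// vkAcquireNextImageKHR call) has signaler.first == VK_NULL_HANDLE and is simply marked
// FENCE_RETIRED.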
1595
1596void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
1597 VkBool32 waitAll, uint64_t timeout, VkResult result) {
1598 if (VK_SUCCESS != result) return;
1599
1600 // When we know that all fences are complete, we can clean/remove their CBs
1601 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
1602 for (uint32_t i = 0; i < fenceCount; i++) {
1603 RetireFence(pFences[i]);
1604 }
1605 }
1606 // NOTE: The alternate case, where only some of the fences have completed, is not handled
1607 // here. In that case, for the app to determine which fences completed, it must call
1608 // vkGetFenceStatus(), at which point we'll clean/remove their CBs if complete.
1609}
1610
1611void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
1612 if (VK_SUCCESS != result) return;
1613 RetireFence(fence);
1614}
1615
1616void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
1617 // Add queue to tracking set only if it is new
1618 auto queue_is_new = queues.emplace(queue);
1619 if (queue_is_new.second) {
1620 QUEUE_STATE *queue_state = &queueMap[queue];
1621 queue_state->queue = queue;
1622 queue_state->queueFamilyIndex = queue_family_index;
1623 queue_state->seq = 0;
1624 }
1625}
1626
1627void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
1628 VkQueue *pQueue) {
1629 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
1630}
1631
1632void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
1633 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
1634}
1635
1636void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
1637 if (VK_SUCCESS != result) return;
1638 QUEUE_STATE *queue_state = GetQueueState(queue);
1639 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size(), true);
1640}
1641
1642void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
1643 if (VK_SUCCESS != result) return;
1644 for (auto &queue : queueMap) {
1645 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size(), true);
1646 }
1647}
1648
1649void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
1650 if (!fence) return;
1651 fenceMap.erase(fence);
1652}
1653
1654void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
1655 const VkAllocationCallbacks *pAllocator) {
1656 if (!semaphore) return;
1657 semaphoreMap.erase(semaphore);
1658}
1659
1660void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
1661 if (!event) return;
1662 EVENT_STATE *event_state = GetEventState(event);
1663 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
1664 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
1665 eventMap.erase(event);
1666}
1667
1668void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
1669 const VkAllocationCallbacks *pAllocator) {
1670 if (!queryPool) return;
1671 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
1672 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
1673 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
1674 queryPoolMap.erase(queryPool);
1675}
1676
1677 // Object with the given handle is being bound to memory with the given mem_info struct.
1678 // Track the binding by inserting the handle into the appropriate bound_* set on mem_info
1679 // (bound_images, bound_buffers, or bound_acceleration_structures).
1680 // The memoryOffset, memRequirements, and is_linear parameters are currently unused here:
1681 // this state tracker only records the binding itself, while scanning for incorrectly
1682 // overlapping linear and non-linear ranges is left to the validation objects built on
1683 // top of this tracking data.
1684void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
1685 VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_linear) {
1686 if (typed_handle.type == kVulkanObjectTypeImage) {
1687 mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
1688 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
1689 mem_info->bound_buffers.insert(typed_handle.handle);
1690 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
1691 mem_info->bound_acceleration_structures.insert(typed_handle.handle);
1692 } else {
1693 // Unsupported object type
1694 assert(false);
1695 }
1696}
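
// Example of the binding record, mirroring UpdateBindBufferMemoryState below
// (hypothetical handles):
//   InsertImageMemoryRange(image, mem_info, offset, reqs, is_linear);
//   // mem_info->bound_images now contains `image`
// Buffers and NV acceleration structures land in bound_buffers /
// bound_acceleration_structures the same way via the wrappers below.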
1697
1698void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
1699 VkMemoryRequirements mem_reqs, bool is_linear) {
1700 InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear);
1701}
1702
1703void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
1704 const VkMemoryRequirements &mem_reqs) {
1705 InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true);
1706}
1707
1708void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
1709 VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs) {
1710 InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset, mem_reqs, true);
1711}
1712
1713 // Remove the handle from the appropriate bound_* set (bound_images, bound_buffers, or bound_acceleration_structures) on mem_info.
1714static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
1715 if (typed_handle.type == kVulkanObjectTypeImage) {
1716 mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
1717 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
1718 mem_info->bound_buffers.erase(typed_handle.handle);
1719 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
1720 mem_info->bound_acceleration_structures.erase(typed_handle.handle);
1721 } else {
1722 // Unsupported object type
1723 assert(false);
1724 }
1725}
1726
1727void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
1728 RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
1729}
1730
1731void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
1732 RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
1733}
1734
1735void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
1736 RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
1737}
1738
1739void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
1740 BUFFER_STATE *buffer_state = GetBufferState(buffer);
1741 if (buffer_state) {
1742 // Track bound memory range information
1743 auto mem_info = GetDevMemState(mem);
1744 if (mem_info) {
1745 InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
1746 }
1747 // Track objects tied to memory
1748 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
1749 }
1750}
1751
1752void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
1753 VkDeviceSize memoryOffset, VkResult result) {
1754 if (VK_SUCCESS != result) return;
1755 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
1756}
1757
1758void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
1759 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
1760 for (uint32_t i = 0; i < bindInfoCount; i++) {
1761 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
1762 }
1763}
1764
1765void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
1766 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
1767 for (uint32_t i = 0; i < bindInfoCount; i++) {
1768 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
1769 }
1770}
1771
1772void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
1773 BUFFER_STATE *buffer_state = GetBufferState(buffer);
1774 if (buffer_state) {
1775 buffer_state->requirements = *pMemoryRequirements;
1776 buffer_state->memory_requirements_checked = true;
1777 }
1778}
1779
1780void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
1781 VkMemoryRequirements *pMemoryRequirements) {
1782 RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
1783}
1784
1785void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
1786 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
1787 VkMemoryRequirements2KHR *pMemoryRequirements) {
1788 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
1789}
1790
1791void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
1792 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
1793 VkMemoryRequirements2KHR *pMemoryRequirements) {
1794 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
1795}
1796
1797void ValidationStateTracker::RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements *pMemoryRequirements) {
1798 IMAGE_STATE *image_state = GetImageState(image);
1799 if (image_state) {
1800 image_state->requirements = *pMemoryRequirements;
1801 image_state->memory_requirements_checked = true;
1802 }
1803}
1804
1805void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
1806 VkMemoryRequirements *pMemoryRequirements) {
1807 RecordGetImageMemoryRequiementsState(image, pMemoryRequirements);
1808}
1809
1810void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
1811 VkMemoryRequirements2 *pMemoryRequirements) {
1812 RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
1813}
1814
1815void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
1816 const VkImageMemoryRequirementsInfo2 *pInfo,
1817 VkMemoryRequirements2 *pMemoryRequirements) {
1818 RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
1819}
1820
1821static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
1822 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
1823 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
1824 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
1825 image_state->sparse_metadata_required = true;
1826 }
1827}
1828
1829void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
1830 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
1831 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
1832 auto image_state = GetImageState(image);
1833 image_state->get_sparse_reqs_called = true;
1834 if (!pSparseMemoryRequirements) return;
1835 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1836 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
1837 }
1838}
1839
1840void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
1841 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
1842 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
1843 auto image_state = GetImageState(pInfo->image);
1844 image_state->get_sparse_reqs_called = true;
1845 if (!pSparseMemoryRequirements) return;
1846 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1847 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
1848 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
1849 }
1850}
1851
1852void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
1853 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
1854 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
1855 auto image_state = GetImageState(pInfo->image);
1856 image_state->get_sparse_reqs_called = true;
1857 if (!pSparseMemoryRequirements) return;
1858 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1859 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
1860 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
1861 }
1862}
1863
1864void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1865 const VkAllocationCallbacks *pAllocator) {
1866 if (!shaderModule) return;
1867 shaderModuleMap.erase(shaderModule);
1868}
1869
1870void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
1871 const VkAllocationCallbacks *pAllocator) {
1872 if (!pipeline) return;
1873 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
1874 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
1875 // Any bound cmd buffers are now invalid
1876 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
1877 pipelineMap.erase(pipeline);
1878}
1879
1880void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
1881 const VkAllocationCallbacks *pAllocator) {
1882 if (!pipelineLayout) return;
1883 pipelineLayoutMap.erase(pipelineLayout);
1884}
1885
1886void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
1887 const VkAllocationCallbacks *pAllocator) {
1888 if (!sampler) return;
1889 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
1890 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
1891 // Any bound cmd buffers are now invalid
1892 if (sampler_state) {
1893 InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
1894 }
1895 samplerMap.erase(sampler);
1896}
1897
1898void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
1899 const VkAllocationCallbacks *pAllocator) {
1900 if (!descriptorSetLayout) return;
1901 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
1902 if (layout_it != descriptorSetLayoutMap.end()) {
1903 layout_it->second.get()->MarkDestroyed();
1904 descriptorSetLayoutMap.erase(layout_it);
1905 }
1906}
1907
1908void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1909 const VkAllocationCallbacks *pAllocator) {
1910 if (!descriptorPool) return;
1911 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
1912 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
1913 if (desc_pool_state) {
1914 // Any bound cmd buffers are now invalid
1915 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
1916 // Free sets that were in this pool
1917 for (auto ds : desc_pool_state->sets) {
1918 FreeDescriptorSet(ds);
1919 }
1920 descriptorPoolMap.erase(descriptorPool);
1921 }
1922}
1923
1924// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
1925void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
1926 const VkCommandBuffer *command_buffers) {
1927 for (uint32_t i = 0; i < command_buffer_count; i++) {
1928 auto cb_state = GetCBState(command_buffers[i]);
1929 // Remove references to command buffer's state and delete
1930 if (cb_state) {
1931 // Reset prior to delete, removing various references to it.
1932 // TODO: fix this; it's insane.
1933 ResetCommandBufferState(cb_state->commandBuffer);
1934 // Remove the cb_state's references from COMMAND_POOL_STATEs
1935 pool_state->commandBuffers.erase(command_buffers[i]);
1936 // Remove the cb debug labels
1937 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
1938 // Remove CBState from CB map
1939 commandBufferMap.erase(cb_state->commandBuffer);
1940 }
1941 }
1942}
1943
1944void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
1945 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
1946 auto pPool = GetCommandPoolState(commandPool);
1947 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
1948}
1949
1950void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
1951 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
1952 VkResult result) {
1953 if (VK_SUCCESS != result) return;
1954 std::unique_ptr<COMMAND_POOL_STATE> cmd_pool_state(new COMMAND_POOL_STATE{});
1955 cmd_pool_state->createFlags = pCreateInfo->flags;
1956 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
1957 commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
1958}
1959
1960void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
1961 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
1962 VkResult result) {
1963 if (VK_SUCCESS != result) return;
1964 std::unique_ptr<QUERY_POOL_STATE> query_pool_state(new QUERY_POOL_STATE{});
1965 query_pool_state->createInfo = *pCreateInfo;
1966 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
1967
1968 QueryObject query_obj{*pQueryPool, 0u};
1969 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
1970 query_obj.query = i;
1971 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
1972 }
1973}
1974
1975void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
1976 const VkAllocationCallbacks *pAllocator) {
1977 if (!commandPool) return;
1978 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
1979 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
1980 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
1981 if (cp_state) {
1982 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
1983 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
1984 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
1985 commandPoolMap.erase(commandPool);
1986 }
1987}
1988
1989void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
1990 VkCommandPoolResetFlags flags, VkResult result) {
1991 if (VK_SUCCESS != result) return;
1992 // Reset all of the CBs allocated from this pool
1993 auto command_pool_state = GetCommandPoolState(commandPool);
1994 for (auto cmdBuffer : command_pool_state->commandBuffers) {
1995 ResetCommandBufferState(cmdBuffer);
1996 }
1997}
1998
1999void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2000 VkResult result) {
2001 for (uint32_t i = 0; i < fenceCount; ++i) {
2002 auto pFence = GetFenceState(pFences[i]);
2003 if (pFence) {
2004 if (pFence->scope == kSyncScopeInternal) {
2005 pFence->state = FENCE_UNSIGNALED;
2006 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2007 pFence->scope = kSyncScopeInternal;
2008 }
2009 }
2010 }
2011}
2012
2013// For given cb_nodes, invalidate them and track object causing invalidation
2014void ValidationStateTracker::InvalidateCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> const &cb_nodes,
2015 const VulkanTypedHandle &obj) {
2016 for (auto cb_node : cb_nodes) {
2017 if (cb_node->state == CB_RECORDING) {
2018 cb_node->state = CB_INVALID_INCOMPLETE;
2019 } else if (cb_node->state == CB_RECORDED) {
2020 cb_node->state = CB_INVALID_COMPLETE;
2021 }
2022 cb_node->broken_bindings.push_back(obj);
2023
2024 // if secondary, then propagate the invalidation to the primaries that will call us.
2025 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
2026 InvalidateCommandBuffers(cb_node->linkedCommandBuffers, obj);
2027 }
2028 }
2029}
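
// Example: destroying a sampler that a recorded command buffer still references lands
// here via PreCallRecordDestroySampler below:
//   secondary cb: CB_RECORDED -> CB_INVALID_COMPLETE, broken_bindings gains the sampler,
//   and every primary in its linkedCommandBuffers is invalidated the same way.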
2030
2031void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2032 const VkAllocationCallbacks *pAllocator) {
2033 if (!framebuffer) return;
2034 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2035 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2036 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
2037 frameBufferMap.erase(framebuffer);
2038}
2039
2040void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2041 const VkAllocationCallbacks *pAllocator) {
2042 if (!renderPass) return;
2043 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2044 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2045 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
2046 renderPassMap.erase(renderPass);
2047}
2048
2049void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2050 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2051 if (VK_SUCCESS != result) return;
2052 std::unique_ptr<FENCE_STATE> fence_state(new FENCE_STATE{});
2053 fence_state->fence = *pFence;
2054 fence_state->createInfo = *pCreateInfo;
2055 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2056 fenceMap[*pFence] = std::move(fence_state);
2057}
2058
2059bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2060 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2061 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2062 void *cgpl_state_data) {
2063 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2064 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2065 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2066 cgpl_state->pipe_state.reserve(count);
2067 for (uint32_t i = 0; i < count; i++) {
2068 cgpl_state->pipe_state.push_back(std::unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
2069 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i],
2070 GetRenderPassStateSharedPtr(pCreateInfos[i].renderPass));
2071 (cgpl_state->pipe_state)[i]->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
2072 }
2073 return false;
2074}
2075
2076void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2077 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2078 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2079 VkResult result, void *cgpl_state_data) {
2080 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2081 // This API may create pipelines regardless of the return value
2082 for (uint32_t i = 0; i < count; i++) {
2083 if (pPipelines[i] != VK_NULL_HANDLE) {
2084 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2085 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2086 }
2087 }
2088 cgpl_state->pipe_state.clear();
2089}
2090
2091bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2092 const VkComputePipelineCreateInfo *pCreateInfos,
2093 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2094 void *ccpl_state_data) {
2095 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2096 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2097 ccpl_state->pipe_state.reserve(count);
2098 for (uint32_t i = 0; i < count; i++) {
2099 // Create and initialize internal tracking data structure
2100 ccpl_state->pipe_state.push_back(unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
2101 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
2102 ccpl_state->pipe_state.back()->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
2103 }
2104 return false;
2105}
2106
2107void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2108 const VkComputePipelineCreateInfo *pCreateInfos,
2109 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2110 VkResult result, void *ccpl_state_data) {
2111 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2112
2113 // This API may create pipelines regardless of the return value
2114 for (uint32_t i = 0; i < count; i++) {
2115 if (pPipelines[i] != VK_NULL_HANDLE) {
2116 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2117 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2118 }
2119 }
2120 ccpl_state->pipe_state.clear();
2121}
2122
2123bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2124 uint32_t count,
2125 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2126 const VkAllocationCallbacks *pAllocator,
2127 VkPipeline *pPipelines, void *crtpl_state_data) {
2128 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2129 crtpl_state->pipe_state.reserve(count);
2130 for (uint32_t i = 0; i < count; i++) {
2131 // Create and initialize internal tracking data structure
2132 crtpl_state->pipe_state.push_back(unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
2133 crtpl_state->pipe_state.back()->initRayTracingPipelineNV(this, &pCreateInfos[i]);
2134 crtpl_state->pipe_state.back()->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
2135 }
2136 return false;
2137}
2138
2139void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2140 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2141 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2142 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2143 // This API may create pipelines regardless of the return value
2144 for (uint32_t i = 0; i < count; i++) {
2145 if (pPipelines[i] != VK_NULL_HANDLE) {
2146 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2147 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2148 }
2149 }
2150 crtpl_state->pipe_state.clear();
2151}
2152
2153void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2154 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2155 VkResult result) {
if (VK_SUCCESS != result) return;
2156 samplerMap[*pSampler] = unique_ptr<SAMPLER_STATE>(new SAMPLER_STATE(pSampler, pCreateInfo));
2157}
2158
2159void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2160 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2161 const VkAllocationCallbacks *pAllocator,
2162 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2163 if (VK_SUCCESS != result) return;
2164 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2165}
2166
2167// For repeatable sorting, not very useful for "memory in range" search
2168struct PushConstantRangeCompare {
2169 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2170 if (lhs->offset == rhs->offset) {
2171 if (lhs->size == rhs->size) {
2172 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2173 return lhs->stageFlags < rhs->stageFlags;
2174 }
2175 // If the offsets are the same then sorting by the end of range is useful for validation
2176 return lhs->size < rhs->size;
2177 }
2178 return lhs->offset < rhs->offset;
2179 }
2180};
2181
2182static PushConstantRangesDict push_constant_ranges_dict;
2183
2184PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2185 if (!info->pPushConstantRanges) {
2186 // Hand back the empty entry (creating as needed)...
2187 return push_constant_ranges_dict.look_up(PushConstantRanges());
2188 }
2189
2190 // Sort the input ranges to ensure equivalent ranges map to the same id
2191 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2192 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2193 sorted.insert(info->pPushConstantRanges + i);
2194 }
2195
2196 PushConstantRanges ranges;
ranges.reserve(sorted.size());  // reserve (not size-construct); the loop below emplaces each range
2197 for (const auto range : sorted) {
2198 ranges.emplace_back(*range);
2199 }
2200 return push_constant_ranges_dict.look_up(std::move(ranges));
2201}
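
// Example: these two hypothetical layouts canonicalize to the same PushConstantRangesId,
// since the ranges are sorted before the dictionary lookup:
//   A: {offset 0,  size 16, VK_SHADER_STAGE_VERTEX_BIT}, {offset 16, size 8, VK_SHADER_STAGE_FRAGMENT_BIT}
//   B: {offset 16, size 8, VK_SHADER_STAGE_FRAGMENT_BIT}, {offset 0,  size 16, VK_SHADER_STAGE_VERTEX_BIT}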
2202
2203 // Dictionary of canonical forms of pipeline set layouts (the per-layout lists of descriptor set layouts)
2204static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2205
2206// Dictionary of canonical form of the "compatible for set" records
2207static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2208
2209static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2210 const PipelineLayoutSetLayoutsId set_layouts_id) {
2211 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2212}
2213
2214void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2215 const VkAllocationCallbacks *pAllocator,
2216 VkPipelineLayout *pPipelineLayout, VkResult result) {
2217 if (VK_SUCCESS != result) return;
2218
2219 std::unique_ptr<PIPELINE_LAYOUT_STATE> pipeline_layout_state(new PIPELINE_LAYOUT_STATE{});
2220 pipeline_layout_state->layout = *pPipelineLayout;
2221 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2222 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2223 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2224 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayout(this, pCreateInfo->pSetLayouts[i]);
2225 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2226 }
2227
2228 // Get canonical form IDs for the "compatible for set" contents
2229 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2230 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2231 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2232
2233 // Create a table of "compatible for set N" canonical forms for trivial-accept validation
2234 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2235 pipeline_layout_state->compat_for_set.emplace_back(
2236 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2237 }
2238 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2239}
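
// Example of the trivial-accept path these IDs enable (hypothetical layouts): two
// VkPipelineLayouts created with identical pSetLayouts and push-constant ranges end up
// with equal compat_for_set[N] IDs, so a "compatible for set N" check reduces to one
// ID comparison instead of a deep structural compare of the layouts.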
2240
2241void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2242 const VkAllocationCallbacks *pAllocator,
2243 VkDescriptorPool *pDescriptorPool, VkResult result) {
2244 if (VK_SUCCESS != result) return;
2245 descriptorPoolMap[*pDescriptorPool] =
2246 std::unique_ptr<DESCRIPTOR_POOL_STATE>(new DESCRIPTOR_POOL_STATE(*pDescriptorPool, pCreateInfo));
2247}
2248
2249void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2250 VkDescriptorPoolResetFlags flags, VkResult result) {
2251 if (VK_SUCCESS != result) return;
2252 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
2253 // TODO: validate flags
2254 // For every set in this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
2255 for (auto ds : pPool->sets) {
2256 FreeDescriptorSet(ds);
2257 }
2258 pPool->sets.clear();
2259 // Reset available count for each type and available sets for this pool
2260 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
2261 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
2262 }
2263 pPool->availableSets = pPool->maxSets;
2264}
2265
2266bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2267 const VkDescriptorSetAllocateInfo *pAllocateInfo,
2268 VkDescriptorSet *pDescriptorSets, void *ads_state_data) {
2269 // Always update common data
2270 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2271 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2272 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2273
2274 return false;
2275}
2276
2277// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
2278void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2279 VkDescriptorSet *pDescriptorSets, VkResult result,
2280 void *ads_state_data) {
2281 if (VK_SUCCESS != result) return;
2282 // All the updates are contained in a single cvdescriptorset function
2283 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2284 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2285 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2286}
2287
2288void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2289 const VkDescriptorSet *pDescriptorSets) {
2290 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2291 // Update available descriptor sets in pool
2292 pool_state->availableSets += count;
2293
2294 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
2295 for (uint32_t i = 0; i < count; ++i) {
2296 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2297 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2298 uint32_t type_index = 0, descriptor_count = 0;
2299 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2300 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2301 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2302 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2303 }
2304 FreeDescriptorSet(descriptor_set);
2305 pool_state->sets.erase(descriptor_set);
2306 }
2307 }
2308}
2309
2310void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2311 const VkWriteDescriptorSet *pDescriptorWrites,
2312 uint32_t descriptorCopyCount,
2313 const VkCopyDescriptorSet *pDescriptorCopies) {
2314 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2315 pDescriptorCopies);
2316}
2317
2318void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2319 VkCommandBuffer *pCommandBuffer, VkResult result) {
2320 if (VK_SUCCESS != result) return;
2321 auto pPool = GetCommandPoolState(pCreateInfo->commandPool);
2322 if (pPool) {
2323 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2324 // Add command buffer to its commandPool map
2325 pPool->commandBuffers.insert(pCommandBuffer[i]);
2326 std::unique_ptr<CMD_BUFFER_STATE> pCB(new CMD_BUFFER_STATE{});
2327 pCB->createInfo = *pCreateInfo;
2328 pCB->device = device;
2329 // Add command buffer to map
2330 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
2331 ResetCommandBufferState(pCommandBuffer[i]);
2332 }
2333 }
2334}
2335
2336// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
2337void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
2338 AddCommandBufferBinding(&fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer),
2339 cb_state);
2340
2341 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
2342 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
2343 auto view_state = GetAttachmentImageViewState(fb_state, attachment);
2344 if (view_state) {
2345 AddCommandBufferBindingImageView(cb_state, view_state);
2346 }
2347 }
2348}
2349
2350void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
2351 const VkCommandBufferBeginInfo *pBeginInfo) {
2352 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2353 if (!cb_state) return;
2354 // This implicitly resets the Cmd Buffer, so make sure any fence is done, then clear memory references
2355 ClearCmdBufAndMemReferences(cb_state);
2356 if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
2357 // Secondary Command Buffer
2358 const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
2359 if (pInfo) {
2360 if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
2361 assert(pInfo->renderPass);
2362 auto framebuffer = GetFramebufferState(pInfo->framebuffer);
2363 if (framebuffer) {
2364 // Connect this framebuffer and its children to this cmdBuffer
2365 AddFramebufferBinding(cb_state, framebuffer);
2366 }
2367 }
2368 }
2369 }
2370 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
2371 ResetCommandBufferState(commandBuffer);
2372 }
2373 // Set updated state here in case implicit reset occurs above
2374 cb_state->state = CB_RECORDING;
2375 cb_state->beginInfo = *pBeginInfo;
2376 if (cb_state->beginInfo.pInheritanceInfo) {
2377 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
2378 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
2379 // If we are a secondary command buffer and inheriting, update the items we should inherit.
2380 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
2381 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
2382 cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
2383 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
2384 cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
2385 cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
2386 }
2387 }
2388
2389 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
2390 if (chained_device_group_struct) {
2391 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
2392 } else {
2393 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
2394 }
2395}
2396
2397void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2398 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2399 if (!cb_state) return;
2400 // Cached validation is specific to a specific recording of a specific command buffer.
2401 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
2402 descriptor_set->ClearCachedValidation(cb_state);
2403 }
2404 cb_state->validated_descriptor_sets.clear();
2405 if (VK_SUCCESS == result) {
2406 cb_state->state = CB_RECORDED;
2407 }
2408}
2409
2410void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
2411 VkResult result) {
2412 if (VK_SUCCESS == result) {
2413 ResetCommandBufferState(commandBuffer);
2414 }
2415}
2416
2417CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
2418 // initially assume everything is static state
2419 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
2420
2421 if (ds) {
2422 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
2423 switch (ds->pDynamicStates[i]) {
2424 case VK_DYNAMIC_STATE_LINE_WIDTH:
2425 flags &= ~CBSTATUS_LINE_WIDTH_SET;
2426 break;
2427 case VK_DYNAMIC_STATE_DEPTH_BIAS:
2428 flags &= ~CBSTATUS_DEPTH_BIAS_SET;
2429 break;
2430 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
2431 flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
2432 break;
2433 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
2434 flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
2435 break;
2436 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
2437 flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
2438 break;
2439 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
2440 flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
2441 break;
2442 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
2443 flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
2444 break;
2445 case VK_DYNAMIC_STATE_SCISSOR:
2446 flags &= ~CBSTATUS_SCISSOR_SET;
2447 break;
2448 case VK_DYNAMIC_STATE_VIEWPORT:
2449 flags &= ~CBSTATUS_VIEWPORT_SET;
2450 break;
2451 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
2452 flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2453 break;
2454 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
2455 flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
2456 break;
2457 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
2458 flags &= ~CBSTATUS_LINE_STIPPLE_SET;
2459 break;
2460 default:
2461 break;
2462 }
2463 }
2464 }
2465
2466 return flags;
2467}
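
// Example: a pipeline declaring VK_DYNAMIC_STATE_VIEWPORT and VK_DYNAMIC_STATE_SCISSOR
// yields
//   CBSTATUS_ALL_STATE_SET & ~(CBSTATUS_VIEWPORT_SET | CBSTATUS_SCISSOR_SET)
// so the command buffer must see vkCmdSetViewport/vkCmdSetScissor before those status
// bits are considered set again (see PreCallRecordCmdBindPipeline below).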
2468
2469 // Validation cache:
2470 // CV is the bottommost implementor of this extension. Don't pass calls down.

2471 // Utility function to set collective state for a pipeline
2472void SetPipelineState(PIPELINE_STATE *pPipe) {
2473 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
2474 if (pPipe->graphicsPipelineCI.pColorBlendState) {
2475 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
2476 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
2477 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2478 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2479 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2480 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2481 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2482 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2483 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2484 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
2485 pPipe->blendConstantsEnabled = true;
2486 }
2487 }
2488 }
2489 }
2490}
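
// Example: an attachment with blendEnable == VK_TRUE and
// srcColorBlendFactor == VK_BLEND_FACTOR_CONSTANT_COLOR falls inside the
// [CONSTANT_COLOR, ONE_MINUS_CONSTANT_ALPHA] factor range checked above, so
// blendConstantsEnabled is set and vkCmdSetBlendConstants is expected before drawing.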
2491
2492void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
2493 VkPipeline pipeline) {
2494 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2495 assert(cb_state);
2496
2497 auto pipe_state = GetPipelineState(pipeline);
2498 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
2499 cb_state->status &= ~cb_state->static_status;
2500 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
2501 cb_state->status |= cb_state->static_status;
2502 }
2503 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout.layout);
2504 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
2505 SetPipelineState(pipe_state);
2506 AddCommandBufferBinding(&pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
2507}
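
// Minimal model of the status bookkeeping above (illustrative, assumed values): clear the bits
// the old pipeline owned as static, then mark the new pipeline's static bits as set; whatever
// the new pipeline leaves dynamic must be re-established by vkCmdSet* calls before drawing.
static inline CBStatusFlags ExampleRebindStatus(CBStatusFlags status, CBStatusFlags old_static_status,
                                                CBStatusFlags new_static_status) {
    status &= ~old_static_status;  // forget state that only counted as set because it was static
    status |= new_static_status;   // static state of the new pipeline counts as set
    return status;
}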
2508
2509void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2510 uint32_t viewportCount, const VkViewport *pViewports) {
2511 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2512 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
2513 cb_state->status |= CBSTATUS_VIEWPORT_SET;
2514}
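
// Worked example of the mask arithmetic above (hypothetical values): firstViewport = 1 and
// viewportCount = 2 give ((1u << 2) - 1u) << 1 == 0b110, flagging viewports 1 and 2 as set.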
2515
2516void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
2517 uint32_t exclusiveScissorCount,
2518 const VkRect2D *pExclusiveScissors) {
2519 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2520 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
2521 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
2522 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2523}
2524
2525void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
2526 VkImageLayout imageLayout) {
2527 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2528
2529 if (imageView != VK_NULL_HANDLE) {
2530 auto view_state = GetImageViewState(imageView);
2531 AddCommandBufferBindingImageView(cb_state, view_state);
2532 }
2533}
2534
2535void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2536 uint32_t viewportCount,
2537 const VkShadingRatePaletteNV *pShadingRatePalettes) {
2538 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2539 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
2540 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
2541 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
2542}
2543
2544void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
2545 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
2546 const VkAllocationCallbacks *pAllocator,
2547 VkAccelerationStructureNV *pAccelerationStructure,
2548 VkResult result) {
2549 if (VK_SUCCESS != result) return;
2550 std::unique_ptr<ACCELERATION_STRUCTURE_STATE> as_state(new ACCELERATION_STRUCTURE_STATE(*pAccelerationStructure, pCreateInfo));
2551
2552 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
2553 VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
2554 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2555 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
2556 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
2557 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
2558
2559 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
2560 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2561 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
2562 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
2563 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
2564 &as_state->build_scratch_memory_requirements);
2565
2566 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
2567 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2568 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
2569 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
2570 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
2571 &as_state->update_scratch_memory_requirements);
2572
2573 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
2574}
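
// The three queries above differ only in the `type` field; a shared helper (hypothetical,
// not used by the tracker) would be a straightforward sketch:
static inline void ExampleGetASMemoryRequirements(VkDevice device, VkAccelerationStructureNV as,
                                                  VkAccelerationStructureMemoryRequirementsTypeNV type,
                                                  VkMemoryRequirements2KHR *out) {
    VkAccelerationStructureMemoryRequirementsInfoNV info = {};
    info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    info.type = type;
    info.accelerationStructure = as;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &info, out);
}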
2575
2576void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
2577 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
2578 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
2579 if (as_state != nullptr) {
2580 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
2581 as_state->memory_requirements = *pMemoryRequirements;
2582 as_state->memory_requirements_checked = true;
2583 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
2584 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
2585 as_state->build_scratch_memory_requirements_checked = true;
2586 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
2587 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
2588 as_state->update_scratch_memory_requirements_checked = true;
2589 }
2590 }
2591}
2592
2593void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
2594 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
2595 if (VK_SUCCESS != result) return;
2596 for (uint32_t i = 0; i < bindInfoCount; i++) {
2597 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
2598
2599 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
2600 if (as_state) {
2601 // Track bound memory range information
2602 auto mem_info = GetDevMemState(info.memory);
2603 if (mem_info) {
2604 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
2605 as_state->requirements);
2606 }
2607 // Track objects tied to memory
2608 SetMemBinding(info.memory, as_state, info.memoryOffset,
2609 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));
2610
2611 // GPU validation of top level acceleration structure building needs acceleration structure handles.
2612 if (enabled.gpu_validation) {
2613 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
2614 }
2615 }
2616 }
2617}
2618
2619void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
2620 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
2621 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
2622 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2623 if (cb_state == nullptr) {
2624 return;
2625 }
2626
2627 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
2628 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
2629 if (dst_as_state != nullptr) {
2630 dst_as_state->built = true;
2631 dst_as_state->build_info.initialize(pInfo);
2632 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
2633 }
2634 if (src_as_state != nullptr) {
2635 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
2636 }
2637 cb_state->hasBuildAccelerationStructureCmd = true;
2638}
2639
2640void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
2641 VkAccelerationStructureNV dst,
2642 VkAccelerationStructureNV src,
2643 VkCopyAccelerationStructureModeNV mode) {
2644 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2645 if (cb_state) {
2646 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
2647 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
2648 if (dst_as_state != nullptr && src_as_state != nullptr) {
2649 dst_as_state->built = true;
2650 dst_as_state->build_info = src_as_state->build_info;
2651 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
2652 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
2653 }
2654 }
2655}
2656
2657void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
2658 VkAccelerationStructureNV accelerationStructure,
2659 const VkAllocationCallbacks *pAllocator) {
2660 if (!accelerationStructure) return;
2661 auto *as_state = GetAccelerationStructureState(accelerationStructure);
2662 if (as_state) {
2663 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
2664 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
2665 for (auto mem_binding : as_state->GetBoundMemory()) {
2666 auto mem_info = GetDevMemState(mem_binding);
2667 if (mem_info) {
2668 RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_info);
2669 }
2670 }
2671 ClearMemoryObjectBindings(obj_struct);
2672 accelerationStructureMap.erase(accelerationStructure);
2673 }
2674}
2675
2676void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
2677 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2678 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
2679}
2680
2681void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
2682 uint16_t lineStipplePattern) {
2683 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2684 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
2685}
2686
2687void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
2688 float depthBiasClamp, float depthBiasSlopeFactor) {
2689 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2690 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
2691}
2692
2693void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
2694 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2695 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
2696}
2697
2698void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
2699 float maxDepthBounds) {
2700 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2701 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
2702}
2703
2704void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2705 uint32_t compareMask) {
2706 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2707 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
2708}
2709
2710void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2711 uint32_t writeMask) {
2712 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2713 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
2714}
2715
2716void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2717 uint32_t reference) {
2718 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2719 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
2720}
2721
2722// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
2723// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
2724// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
2725void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
2726 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
2727 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
2728 cvdescriptorset::DescriptorSet *push_descriptor_set,
2729 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
2730 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
2731 // Defensive
2732 assert(pipeline_layout);
2733 if (!pipeline_layout) return;
2734
2735 uint32_t required_size = first_set + set_count;
2736 const uint32_t last_binding_index = required_size - 1;
2737 assert(last_binding_index < pipeline_layout->compat_for_set.size());
2738
2739 // Some useful shorthand
2740 auto &last_bound = cb_state->lastBound[pipeline_bind_point];
2741 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
2742 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
2743
2744 // We need this three times in this function, but nowhere else
2745 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
2746 if (ds && ds->IsPushDescriptor()) {
2747 assert(ds == last_bound.push_descriptor_set.get());
2748 last_bound.push_descriptor_set = nullptr;
2749 return true;
2750 }
2751 return false;
2752 };
2753
2754    // Clean up the "disturbed" sets before and after the range being set
2755 if (required_size < current_size) {
2756 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
2757            // We're disturbing those past last; we'll shrink below, but first we need to check for and clean up the push_descriptor
2758 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
2759 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
2760 }
2761 } else {
2762 // We're not disturbing past last, so leave the upper binding data alone.
2763 required_size = current_size;
2764 }
2765 }
2766
2767 // We resize if we need more set entries or if those past "last" are disturbed
2768 if (required_size != current_size) {
2769 last_bound.per_set.resize(required_size);
2770 }
2771
2772 // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
2773 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
2774 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
2775 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
2776 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
2777 last_bound.per_set[set_idx].dynamicOffsets.clear();
2778 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
2779 }
2780 }
2781
2782 // Now update the bound sets with the input sets
2783 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
2784 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
2785 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
2786 cvdescriptorset::DescriptorSet *descriptor_set =
2787 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
2788
2789 // Record binding (or push)
2790 if (descriptor_set != last_bound.push_descriptor_set.get()) {
2791 // Only cleanup the push descriptors if they aren't the currently used set.
2792 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
2793 }
2794 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
2795 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
2796
2797 if (descriptor_set) {
2798 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
2799 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
2800 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
2801 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
2802 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
2803 input_dynamic_offsets = end_offset;
2804 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
2805 } else {
2806 last_bound.per_set[set_idx].dynamicOffsets.clear();
2807 }
2808 if (!descriptor_set->IsPushDescriptor()) {
2809 // Can't cache validation of push_descriptors
2810 cb_state->validated_descriptor_sets.insert(descriptor_set);
2811 }
2812 }
2813 }
2814}
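
// Worked example of the dynamic-offset consumption above (hypothetical counts): with first_set = 1
// and two input sets holding 2 and 1 dynamic descriptors respectively, set 1 consumes offsets
// [0, 2) and set 2 consumes offset [2, 3) of p_dynamic_offsets; the read pointer just advances by
// each set's GetDynamicDescriptorCount(), never past dynamic_offset_count (the assert above).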
2815
2816// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
2817void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
2818 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
2819 uint32_t firstSet, uint32_t setCount,
2820 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
2821 const uint32_t *pDynamicOffsets) {
2822 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2823 auto pipeline_layout = GetPipelineLayout(layout);
2824
2825 // Resize binding arrays
2826 uint32_t last_set_index = firstSet + setCount - 1;
2827 if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
2828 cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
2829 }
2830
2831 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
2832 dynamicOffsetCount, pDynamicOffsets);
2833 cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
2834 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
2835}
2836
2837void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
2838 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
2839 const VkWriteDescriptorSet *pDescriptorWrites) {
2840 const auto &pipeline_layout = GetPipelineLayout(layout);
2841 // Short circuit invalid updates
2842 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
2843 !pipeline_layout->set_layouts[set]->IsPushDescriptor())
2844 return;
2845
2846 // We need a descriptor set to update the bindings with, compatible with the passed layout
2847 const auto dsl = pipeline_layout->set_layouts[set];
2848 auto &last_bound = cb_state->lastBound[pipelineBindPoint];
2849 auto &push_descriptor_set = last_bound.push_descriptor_set;
2850    // If we are disturbing the current push_descriptor_set, clear it
2851 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
2852 last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, 0, dsl, 0, this));
2853 }
2854
2855 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
2856 nullptr);
2857 last_bound.pipeline_layout = layout;
2858
2859 // Now that we have either the new or extant push_descriptor set ... do the write updates against it
2860 push_descriptor_set->PerformPushDescriptorsUpdate(descriptorWriteCount, pDescriptorWrites);
2861}
2862
2863void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
2864 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
2865 uint32_t set, uint32_t descriptorWriteCount,
2866 const VkWriteDescriptorSet *pDescriptorWrites) {
2867 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2868 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
2869}
2870
2871void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
2872 VkIndexType indexType) {
2873 auto buffer_state = GetBufferState(buffer);
2874 auto cb_state = GetCBState(commandBuffer);
2875
2876 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
2877 cb_state->index_buffer_binding.buffer = buffer;
2878 cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
2879 cb_state->index_buffer_binding.offset = offset;
2880 cb_state->index_buffer_binding.index_type = indexType;
2881    // Add binding for this index buffer to this command buffer
2882 AddCommandBufferBindingBuffer(cb_state, buffer_state);
2883}
2884
2885void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
2886 uint32_t bindingCount, const VkBuffer *pBuffers,
2887 const VkDeviceSize *pOffsets) {
2888 auto cb_state = GetCBState(commandBuffer);
2889
2890 uint32_t end = firstBinding + bindingCount;
2891 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
2892 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
2893 }
2894
2895 for (uint32_t i = 0; i < bindingCount; ++i) {
2896 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
2897 vertex_buffer_binding.buffer = pBuffers[i];
2898 vertex_buffer_binding.offset = pOffsets[i];
2899        // Add binding for this vertex buffer to this command buffer
2900 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
2901 }
2902}
2903
2904void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
2905 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
2906 auto cb_state = GetCBState(commandBuffer);
2907 auto dst_buffer_state = GetBufferState(dstBuffer);
2908
2909 // Update bindings between buffer and cmd buffer
2910 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
2911}
2912
2913bool ValidationStateTracker::SetEventStageMask(VkQueue queue, VkCommandBuffer commandBuffer, VkEvent event,
2914 VkPipelineStageFlags stageMask) {
2915 CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
2916 if (pCB) {
2917 pCB->eventToStageMap[event] = stageMask;
2918 }
2919 auto queue_data = queueMap.find(queue);
2920 if (queue_data != queueMap.end()) {
2921 queue_data->second.eventToStageMap[event] = stageMask;
2922 }
2923 return false;
2924}
2925
2926void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
2927 VkPipelineStageFlags stageMask) {
2928 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2929 auto event_state = GetEventState(event);
2930 if (event_state) {
2931 AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
2932 event_state->cb_bindings.insert(cb_state);
2933 }
2934 cb_state->events.push_back(event);
2935 if (!cb_state->waitedEvents.count(event)) {
2936 cb_state->writeEventsBeforeWait.push_back(event);
2937 }
2938 cb_state->eventUpdates.emplace_back([=](VkQueue q) { return SetEventStageMask(q, commandBuffer, event, stageMask); });
2939}
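
// Sketch of how the closure recorded above is consumed (a simplified model of the submit-time
// replay; the member names match those used above):
static inline void ExampleReplayEventUpdates(CMD_BUFFER_STATE *cb_state, VkQueue queue) {
    for (auto &event_update : cb_state->eventUpdates) {
        event_update(queue);  // e.g. invokes SetEventStageMask(queue, commandBuffer, event, stageMask)
    }
}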
2940
2941void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
2942 VkPipelineStageFlags stageMask) {
2943 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2944 auto event_state = GetEventState(event);
2945 if (event_state) {
2946 AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
2947 event_state->cb_bindings.insert(cb_state);
2948 }
2949 cb_state->events.push_back(event);
2950 if (!cb_state->waitedEvents.count(event)) {
2951 cb_state->writeEventsBeforeWait.push_back(event);
2952 }
2953
2954 cb_state->eventUpdates.emplace_back(
2955 [=](VkQueue q) { return SetEventStageMask(q, commandBuffer, event, VkPipelineStageFlags(0)); });
2956}
2957
2958void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
2959 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
2960 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2961 uint32_t bufferMemoryBarrierCount,
2962 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2963 uint32_t imageMemoryBarrierCount,
2964 const VkImageMemoryBarrier *pImageMemoryBarriers) {
2965 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2966 for (uint32_t i = 0; i < eventCount; ++i) {
2967 auto event_state = GetEventState(pEvents[i]);
2968 if (event_state) {
2969 AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent), cb_state);
2970 event_state->cb_bindings.insert(cb_state);
2971 }
2972 cb_state->waitedEvents.insert(pEvents[i]);
2973 cb_state->events.push_back(pEvents[i]);
2974 }
2975}
2976
2977bool ValidationStateTracker::SetQueryState(VkQueue queue, VkCommandBuffer commandBuffer, QueryObject object, QueryState value) {
2978 CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
2979 if (pCB) {
2980 pCB->queryToStateMap[object] = value;
2981 }
2982 auto queue_data = queueMap.find(queue);
2983 if (queue_data != queueMap.end()) {
2984 queue_data->second.queryToStateMap[object] = value;
2985 }
2986 return false;
2987}
2988
2989bool ValidationStateTracker::SetQueryStateMulti(VkQueue queue, VkCommandBuffer commandBuffer, VkQueryPool queryPool,
2990 uint32_t firstQuery, uint32_t queryCount, QueryState value) {
2991 CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
2992 auto queue_data = queueMap.find(queue);
2993
2994 for (uint32_t i = 0; i < queryCount; i++) {
2995 QueryObject object = {queryPool, firstQuery + i};
2996 if (pCB) {
2997 pCB->queryToStateMap[object] = value;
2998 }
2999 if (queue_data != queueMap.end()) {
3000 queue_data->second.queryToStateMap[object] = value;
3001 }
3002 }
3003 return false;
3004}
3005
3006void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
3007 cb_state->activeQueries.insert(query_obj);
3008 cb_state->startedQueries.insert(query_obj);
3009 cb_state->queryUpdates.emplace_back([this, cb_state, query_obj](VkQueue q) {
3010 SetQueryState(q, cb_state->commandBuffer, query_obj, QUERYSTATE_RUNNING);
3011 return false;
3012 });
3013 AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
3014 VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
3015}
3016
3017void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3018 VkFlags flags) {
3019 QueryObject query = {queryPool, slot};
3020 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3021 RecordCmdBeginQuery(cb_state, query);
3022}
3023
3024void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
3025 cb_state->activeQueries.erase(query_obj);
3026 cb_state->queryUpdates.emplace_back(
3027 [this, cb_state, query_obj](VkQueue q) { return SetQueryState(q, cb_state->commandBuffer, query_obj, QUERYSTATE_ENDED); });
3028 AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
3029 VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
3030}
3031
3032void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
3033 QueryObject query_obj = {queryPool, slot};
3034 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3035 RecordCmdEndQuery(cb_state, query_obj);
3036}
3037
3038void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3039 uint32_t firstQuery, uint32_t queryCount) {
3040 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3041
3042 cb_state->queryUpdates.emplace_back([this, commandBuffer, queryPool, firstQuery, queryCount](VkQueue q) {
3043 return SetQueryStateMulti(q, commandBuffer, queryPool, firstQuery, queryCount, QUERYSTATE_RESET);
3044 });
3045 AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
3046 cb_state);
3047}
3048
3049void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3050 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3051 VkDeviceSize dstOffset, VkDeviceSize stride,
3052 VkQueryResultFlags flags) {
3053 auto cb_state = GetCBState(commandBuffer);
3054 auto dst_buff_state = GetBufferState(dstBuffer);
3055 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
3056 AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
3057 cb_state);
3058}
3059
3060void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3061 VkQueryPool queryPool, uint32_t slot) {
3062 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3063 AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
3064 cb_state);
3065 QueryObject query = {queryPool, slot};
3066 cb_state->queryUpdates.emplace_back(
3067 [this, commandBuffer, query](VkQueue q) { return SetQueryState(q, commandBuffer, query, QUERYSTATE_ENDED); });
3068}
3069
3070void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3071 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3072 VkResult result) {
3073 if (VK_SUCCESS != result) return;
3074 // Shadow create info and store in map
3075 std::unique_ptr<FRAMEBUFFER_STATE> fb_state(
3076 new FRAMEBUFFER_STATE(*pFramebuffer, pCreateInfo, GetRenderPassStateSharedPtr(pCreateInfo->renderPass)));
3077
3078 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
3079 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
3080 VkImageView view = pCreateInfo->pAttachments[i];
3081 auto view_state = GetImageViewState(view);
3082 if (!view_state) {
3083 continue;
3084 }
3085 }
3086 }
3087 frameBufferMap[*pFramebuffer] = std::move(fb_state);
3088}
3089
3090void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3091 RENDER_PASS_STATE *render_pass) {
3092 auto &subpass_to_node = render_pass->subpassToNode;
3093 subpass_to_node.resize(pCreateInfo->subpassCount);
3094 auto &self_dependencies = render_pass->self_dependencies;
3095 self_dependencies.resize(pCreateInfo->subpassCount);
3096
3097 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
3098 subpass_to_node[i].pass = i;
3099 self_dependencies[i].clear();
3100 }
3101 for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
3102 const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
3103 if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
3104 if (dependency.srcSubpass == dependency.dstSubpass) {
3105 self_dependencies[dependency.srcSubpass].push_back(i);
3106 } else {
3107 subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
3108 subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
3109 }
3110 }
3111 }
3112}
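
// Worked example (hypothetical render pass): dependencies {src = 0, dst = 1} and {src = 1, dst = 1}
// yield subpass_to_node[0].next = {1} and subpass_to_node[1].prev = {0}, and record the second
// dependency's index in self_dependencies[1]; edges involving VK_SUBPASS_EXTERNAL are skipped.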
3113
3114static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
3115 if (index == VK_ATTACHMENT_UNUSED) return;
3116
3117 if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
3118}
3119
3120void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
3121 std::shared_ptr<RENDER_PASS_STATE> &render_pass,
3122 VkRenderPass *pRenderPass) {
3123 render_pass->renderPass = *pRenderPass;
3124 auto create_info = render_pass->createInfo.ptr();
3125
3126    RecordRenderPassDAG(rp_version, create_info, render_pass.get());
3127
3128 for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
3129 const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
3130 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
3131 MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);
3132
3133 // resolve attachments are considered to be written
3134 if (subpass.pResolveAttachments) {
3135 MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
3136 }
3137 }
3138 if (subpass.pDepthStencilAttachment) {
3139 MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
3140 }
3141 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
3142 MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
3143 }
3144 }
3145
3146    // Even though render_pass is passed by non-const reference, we still must move so that move assignment is invoked.
3147 renderPassMap[*pRenderPass] = std::move(render_pass);
3148}
3149
3150// Style note:
3151// Taking the shared_ptr by mutable reference and moving from it exceeds the recommended usage of references in the Google style
3152// guide, but intentionally forces the caller to relinquish ownership. This is clearer than passing a pointer to shared_ptr and
3153// avoids the atomic increment/decrement of shared_ptr copy construction or assignment.
3154void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
3155 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3156 VkResult result) {
3157 if (VK_SUCCESS != result) return;
3158 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
3159 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
3160}
3161
3162void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3163 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3164 VkResult result) {
3165 if (VK_SUCCESS != result) return;
3166 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
3167 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
3168}
3169
3170void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
3171 const VkRenderPassBeginInfo *pRenderPassBegin,
3172 const VkSubpassContents contents) {
3173 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3174 auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
3175 auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
3176
3177 if (render_pass_state) {
3178 cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
3179 cb_state->activeRenderPass = render_pass_state;
3180 // This is a shallow copy as that is all that is needed for now
3181 cb_state->activeRenderPassBeginInfo = *pRenderPassBegin;
3182 cb_state->activeSubpass = 0;
3183 cb_state->activeSubpassContents = contents;
3184 cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
3185 // Connect this framebuffer and its children to this cmdBuffer
3186 AddFramebufferBinding(cb_state, framebuffer);
3187 // Connect this RP to cmdBuffer
3188 AddCommandBufferBinding(&render_pass_state->cb_bindings,
3189 VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass), cb_state);
3190
3191 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
3192 if (chained_device_group_struct) {
3193 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
3194 } else {
3195 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
3196 }
3197 }
3198}
3199
3200void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
3201 const VkRenderPassBeginInfo *pRenderPassBegin,
3202 VkSubpassContents contents) {
3203 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
3204}
3205
3206void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
3207 const VkRenderPassBeginInfo *pRenderPassBegin,
3208 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
3209 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
3210}
3211
3212void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3213 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3214 cb_state->activeSubpass++;
3215 cb_state->activeSubpassContents = contents;
3216}
3217
3218void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3219 RecordCmdNextSubpass(commandBuffer, contents);
3220}
3221
3222void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
3223 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
3224 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
3225 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
3226}
3227
3228void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
3229 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3230 cb_state->activeRenderPass = nullptr;
3231 cb_state->activeSubpass = 0;
3232 cb_state->activeFramebuffer = VK_NULL_HANDLE;
3233}
3234
3235void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
3236 RecordCmdEndRenderPassState(commandBuffer);
3237}
3238
3239void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
3240 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
3241 RecordCmdEndRenderPassState(commandBuffer);
3242}
3243
3244void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
3245 const VkCommandBuffer *pCommandBuffers) {
3246 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3247
3248 CMD_BUFFER_STATE *sub_cb_state = NULL;
3249 for (uint32_t i = 0; i < commandBuffersCount; i++) {
3250 sub_cb_state = GetCBState(pCommandBuffers[i]);
3251 assert(sub_cb_state);
3252 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
3253 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
3254 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
3255 // from the validation step to the recording step
3256 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
3257 }
3258 }
3259
3260        // Propagate initial layout and current layout state to the primary cmd buffer
3261        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
3262        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
3263        // for those other classes.
3264 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
3265 const auto image = sub_layout_map_entry.first;
3266 const auto *image_state = GetImageState(image);
3267 if (!image_state) continue; // Can't set layouts of a dead image
3268
3269 auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
3270 const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
3271 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
3272 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
3273 }
3274
3275 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
3276 cb_state->linkedCommandBuffers.insert(sub_cb_state);
3277 sub_cb_state->linkedCommandBuffers.insert(cb_state);
3278 for (auto &function : sub_cb_state->queryUpdates) {
3279 cb_state->queryUpdates.push_back(function);
3280 }
3281 for (auto &function : sub_cb_state->queue_submit_functions) {
3282 cb_state->queue_submit_functions.push_back(function);
3283 }
3284 }
3285}
3286
3287void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
3288 VkFlags flags, void **ppData, VkResult result) {
3289 if (VK_SUCCESS != result) return;
3290 RecordMappedMemory(mem, offset, size, ppData);
3291}
3292
3293void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
3294 auto mem_info = GetDevMemState(mem);
3295 if (mem_info) {
3296 mem_info->mapped_range = MemRange();
3297 mem_info->p_driver_data = nullptr;
3298 }
3299}
3300
3301void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
3302 IMAGE_STATE *image_state = GetImageState(bindInfo.image);
3303 if (image_state) {
3304 const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
3305 if (swapchain_info) {
3306 auto swapchain = GetSwapchainState(swapchain_info->swapchain);
3307 if (swapchain) {
3308 swapchain->bound_images.insert(image_state->image);
3309 image_state->bind_swapchain = swapchain_info->swapchain;
3310 image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
3311 }
3312 } else {
3313 // Track bound memory range information
3314 auto mem_info = GetDevMemState(bindInfo.memory);
3315 if (mem_info) {
3316 InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
3317 image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
3318 }
3319
3320 // Track objects tied to memory
3321 SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
3322 VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
3323 }
3324 if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
3325 AddAliasingImage(image_state);
3326 }
3327 }
3328}
3329
3330void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
3331 VkDeviceSize memoryOffset, VkResult result) {
3332 if (VK_SUCCESS != result) return;
3333 VkBindImageMemoryInfo bindInfo = {};
3334 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
3335 bindInfo.image = image;
3336 bindInfo.memory = mem;
3337 bindInfo.memoryOffset = memoryOffset;
3338 UpdateBindImageMemoryState(bindInfo);
3339}
3340
3341void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
3342 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
3343 if (VK_SUCCESS != result) return;
3344 for (uint32_t i = 0; i < bindInfoCount; i++) {
3345 UpdateBindImageMemoryState(pBindInfos[i]);
3346 }
3347}
3348
3349void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
3350 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
3351 if (VK_SUCCESS != result) return;
3352 for (uint32_t i = 0; i < bindInfoCount; i++) {
3353 UpdateBindImageMemoryState(pBindInfos[i]);
3354 }
3355}
3356
3357void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
3358 auto event_state = GetEventState(event);
3359 if (event_state) {
3360 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
3361 }
3362 // Host setting event is visible to all queues immediately so update stageMask for any queue that's seen this event
3363    // TODO: For correctness this needs a separate fix to verify that the app doesn't make incorrect assumptions about the
3364    // ordering of this command in relation to vkCmd[Set|Reset]Events (see GH297)
3365 for (auto queue_data : queueMap) {
3366 auto event_entry = queue_data.second.eventToStageMap.find(event);
3367 if (event_entry != queue_data.second.eventToStageMap.end()) {
3368 event_entry->second |= VK_PIPELINE_STAGE_HOST_BIT;
3369 }
3370 }
3371}
3372
3373void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
3374 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
3375 VkResult result) {
3376 if (VK_SUCCESS != result) return;
3377 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
3378 pImportSemaphoreFdInfo->flags);
3379}
3380
3381void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
3382 VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
3383 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
3384 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
3385 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
3386 semaphore_state->scope = kSyncScopeExternalPermanent;
3387 }
3388}
3389
3390#ifdef VK_USE_PLATFORM_WIN32_KHR
3391void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
3392 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
3393 if (VK_SUCCESS != result) return;
3394 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
3395 pImportSemaphoreWin32HandleInfo->flags);
3396}
3397
3398void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
3399 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
3400 HANDLE *pHandle, VkResult result) {
3401 if (VK_SUCCESS != result) return;
3402 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
3403}
3404
3405void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
3406 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
3407 if (VK_SUCCESS != result) return;
3408 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
3409 pImportFenceWin32HandleInfo->flags);
3410}
3411
3412void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
3413 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
3414 HANDLE *pHandle, VkResult result) {
3415 if (VK_SUCCESS != result) return;
3416 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
3417}
3418#endif
3419
3420void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
3421 VkResult result) {
3422 if (VK_SUCCESS != result) return;
3423 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
3424}
3425
3426void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
3427 VkFenceImportFlagsKHR flags) {
3428 FENCE_STATE *fence_node = GetFenceState(fence);
3429 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
3430 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
3431 fence_node->scope == kSyncScopeInternal) {
3432 fence_node->scope = kSyncScopeExternalTemporary;
3433 } else {
3434 fence_node->scope = kSyncScopeExternalPermanent;
3435 }
3436 }
3437}
3438
3439void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
3440 VkResult result) {
3441 if (VK_SUCCESS != result) return;
3442 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
3443}
3444
3445void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
3446 FENCE_STATE *fence_state = GetFenceState(fence);
3447 if (fence_state) {
3448 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
3449 // Export with reference transference becomes external
3450 fence_state->scope = kSyncScopeExternalPermanent;
3451 } else if (fence_state->scope == kSyncScopeInternal) {
3452 // Export with copy transference has a side effect of resetting the fence
3453 fence_state->state = FENCE_UNSIGNALED;
3454 }
3455 }
3456}
3457
3458void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
3459 VkResult result) {
3460 if (VK_SUCCESS != result) return;
3461 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
3462}
3463
3464void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
3465 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
3466 if (VK_SUCCESS != result) return;
3467 eventMap[*pEvent].write_in_use = 0;
3468 eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
3469}
3470
3471void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
3472 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
3473 SWAPCHAIN_NODE *old_swapchain_state) {
3474 if (VK_SUCCESS == result) {
3475 auto swapchain_state = unique_ptr<SWAPCHAIN_NODE>(new SWAPCHAIN_NODE(pCreateInfo, *pSwapchain));
3476 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
3477 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
3478 swapchain_state->shared_presentable = true;
3479 }
3480 surface_state->swapchain = swapchain_state.get();
3481 swapchainMap[*pSwapchain] = std::move(swapchain_state);
3482 } else {
3483 surface_state->swapchain = nullptr;
3484 }
3485 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
3486 if (old_swapchain_state) {
3487 old_swapchain_state->retired = true;
3488 }
3489 return;
3490}
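
// App-side sketch of the retirement rule above (illustrative, hypothetical handles):
//   VkSwapchainCreateInfoKHR ci = {/* ... */};
//   ci.oldSwapchain = old_swapchain;
//   VkResult res = vkCreateSwapchainKHR(device, &ci, nullptr, &new_swapchain);
//   // Even if res != VK_SUCCESS, old_swapchain is now retired and no further images
//   // may be acquired from it, which is why `retired` is set on both paths above.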
3491
3492void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
3493 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
3494 VkResult result) {
3495 auto surface_state = GetSurfaceState(pCreateInfo->surface);
3496 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
3497 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
3498}
3499
3500void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
3501 const VkAllocationCallbacks *pAllocator) {
3502 if (!swapchain) return;
3503 auto swapchain_data = GetSwapchainState(swapchain);
3504 if (swapchain_data) {
3505 for (const auto &swapchain_image : swapchain_data->images) {
3506 ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image, kVulkanObjectTypeImage));
3507 imageMap.erase(swapchain_image);
3508 }
3509
3510 auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
3511 if (surface_state) {
3512 if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
3513 }
3514 RemoveAliasingImages(swapchain_data->bound_images);
3515 swapchainMap.erase(swapchain);
3516 }
3517}
3518
3519void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
3520 // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
3521 for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
3522 auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
3523 if (pSemaphore) {
3524 pSemaphore->signaler.first = VK_NULL_HANDLE;
3525 pSemaphore->signaled = false;
3526 }
3527 }
3528
3529 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
3530 // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
3531 // confused itself just as much.
3532 auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
3533 if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
3534 // Mark the image as having been released to the WSI
3535 auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
3536 if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
3537 auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]];
3538 auto image_state = GetImageState(image);
3539 if (image_state) {
3540 image_state->acquired = false;
3541 }
3542 }
3543 }
3544 // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
3545 // its semaphore waits) /never/ participate in any completion proof.
3546}
3547
3548void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
3549 const VkSwapchainCreateInfoKHR *pCreateInfos,
3550 const VkAllocationCallbacks *pAllocator,
3551 VkSwapchainKHR *pSwapchains, VkResult result) {
3552 if (pCreateInfos) {
3553 for (uint32_t i = 0; i < swapchainCount; i++) {
3554 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
3555 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
3556 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
3557 }
3558 }
3559}
3560
3561void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
3562 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
3563 auto pFence = GetFenceState(fence);
3564 if (pFence && pFence->scope == kSyncScopeInternal) {
3565 // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
3566 // import
3567 pFence->state = FENCE_INFLIGHT;
3568 pFence->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
3569 }
3570
3571 auto pSemaphore = GetSemaphoreState(semaphore);
3572 if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
3573 // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
3574 // temporary import
3575 pSemaphore->signaled = true;
3576 pSemaphore->signaler.first = VK_NULL_HANDLE;
3577 }
3578
3579 // Mark the image as acquired.
3580 auto swapchain_data = GetSwapchainState(swapchain);
3581 if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
3582 auto image = swapchain_data->images[*pImageIndex];
3583 auto image_state = GetImageState(image);
3584 if (image_state) {
3585 image_state->acquired = true;
3586 image_state->shared_presentable = swapchain_data->shared_presentable;
3587 }
3588 }
3589}
3590
3591void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
3592 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
3593 VkResult result) {
3594 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
3595 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
3596}
3597
3598void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
3599 uint32_t *pImageIndex, VkResult result) {
3600 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
3601 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
3602 pAcquireInfo->fence, pImageIndex);
3603}
3604
3605void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
3606 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
3607    if ((NULL != pPhysicalDevices) && (result == VK_SUCCESS || result == VK_INCOMPLETE)) {
3608 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
3609 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
3610 phys_device_state.phys_device = pPhysicalDevices[i];
3611 // Init actual features for each physical device
3612 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
3613 }
3614 }
3615}
3616
3617// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
3618static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
3619 VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
3620 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
3621
3622 if (!pQueueFamilyProperties) {
3623 if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
3624 pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
3625 } else { // Save queue family properties
3626 pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;
3627
3628 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
3629 for (uint32_t i = 0; i < count; ++i) {
3630 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
3631 }
3632 }
3633}
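
// The QUERY_COUNT/QUERY_DETAILS transitions above model the usual two-call pattern
// (illustrative app-side code):
//   uint32_t count = 0;
//   vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);       // -> QUERY_COUNT
//   std::vector<VkQueueFamilyProperties> props(count);
//   vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, props.data());  // -> QUERY_DETAILS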

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        // Wrap the 1.0 structs in 2KHR structs so the common helper can consume either form.
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::unique_ptr<SURFACE_STATE>(new SURFACE_STATE{*pSurface});
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    // VkSurfaceCapabilities2EXT is not layout-compatible with VkSurfaceCapabilitiesKHR (it adds
    // sType/pNext and supportedSurfaceCounters), so copy the shared fields individually.
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, &local_create_info));
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
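
// Illustrative sketch (not part of the layer): the application-side template update these
// Pre hooks intercept. Names (app_device, app_set, app_template, packed_data) are hypothetical;
// packed_data is laid out according to the template's VkDescriptorUpdateTemplateEntry array.
#if 0
vkUpdateDescriptorSetWithTemplate(app_device, app_set, app_template, packed_data);
#endif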

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->IsDestroyed()) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}
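
// Illustrative sketch (not part of the layer): indexed queries come from
// VK_EXT_transform_feedback, where 'index' selects the vertex stream being queried. Handle
// names are hypothetical.
#if 0
vkCmdBeginQueryIndexedEXT(app_cmd_buffer, app_query_pool, 0 /*query*/, 0 /*flags*/, 1 /*index*/);
// ... record work that emits to vertex stream 1 ...
vkCmdEndQueryIndexedEXT(app_cmd_buffer, app_query_pool, 0 /*query*/, 1 /*index*/);
#endif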

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
    }
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.host_query_reset_features.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        auto query_it = queryToStateMap.find(query_obj);
        if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
    }
}
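
// Illustrative sketch (not part of the layer): the VK_EXT_host_query_reset call recorded by
// the hook above; it resets queries from the host with no command buffer involved. Handle
// names are hypothetical.
#if 0
vkResetQueryPoolEXT(app_device, app_query_pool, 0 /*firstQuery*/, 4 /*queryCount*/);
// Queries [0, 4) in the pool are now QUERYSTATE_RESET in queryToStateMap.
#endif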

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayout(this, p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t type_index = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[type_index] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
    }
}
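
// Worked example of the counting above: a layout with binding 0 = UNIFORM_BUFFER, count 2 and
// binding 1 = COMBINED_IMAGE_SAMPLER, count 4 adds {UNIFORM_BUFFER: 2, COMBINED_IMAGE_SAMPLER: 4}
// to required_descriptors_by_type for that set.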

// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= it->second;
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        std::unique_ptr<cvdescriptorset::DescriptorSet> new_ds(new cvdescriptorset::DescriptorSet(
            descriptor_sets[i], p_alloc_info->descriptorPool, ds_data->layout_nodes[i], variable_count, this));
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}
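
// Illustrative sketch (not part of the layer): an allocation carrying the variable descriptor
// count chain consumed above. Names (app_pool, app_layout, app_device) are hypothetical.
#if 0
uint32_t counts[] = {64};  // actual count for the layout's variable-sized binding
VkDescriptorSetVariableDescriptorCountAllocateInfoEXT variable_info = {
    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, nullptr, 1, counts};
VkDescriptorSetAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, &variable_info, app_pool, 1,
                                          &app_layout};
VkDescriptorSet set = VK_NULL_HANDLE;
vkAllocateDescriptorSets(app_device, &alloc_info, &set);  // tracker records variable_count = 64
#endif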

// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateDrawState(cb_state, bind_point);
    cb_state->hasDispatchCmd = true;
}

static inline void UpdateResourceTrackingOnDraw(CMD_BUFFER_STATE *pCB) {
    pCB->cb_vertex_buffer_binding_info.push_back(pCB->current_vertex_buffer_binding_info);
}

// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateStateCmdDrawDispatchType(cb_state, bind_point);
    UpdateResourceTrackingOnDraw(cb_state);
    cb_state->hasDrawCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                                   uint32_t firstVertex, uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                                          uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                           uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}
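
// Illustrative note (not part of the layer): the buffer bound above must contain a
// VkDispatchIndirectCommand at 'offset'; the GPU reads the workgroup counts from it.
#if 0
VkDispatchIndirectCommand args = {8, 8, 1};  // x, y, z workgroup counts, written by the app
#endif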

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}
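
// Illustrative note (not part of the layer): for the *Count variants, 'buffer' holds an array
// of VkDrawIndirectCommand (or VkDrawIndexedIndirectCommand) records placed 'stride' bytes
// apart, while 'countBuffer' holds a single uint32_t draw count that is clamped to maxDrawCount.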

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}

void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    std::unique_ptr<SHADER_MODULE_STATE> new_shader_module(
        is_spirv ? new SHADER_MODULE_STATE(pCreateInfo, *pShaderModule, spirv_environment, csm_state->unique_shader_id)
                 : new SHADER_MODULE_STATE());
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}
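
// Illustrative note (not part of the layer): spv::MagicNumber is the SPIR-V magic word
// 0x07230203, so the is_spirv test above only checks that the first word of pCode looks like
// a SPIR-V header before any deeper parsing is attempted.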

void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module || !module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(report_data, module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // While validating shaders, capture which descriptor slots are used by the pipeline
    for (auto use : stage_state->descriptor_uses) {
        auto &reqs = pipeline->active_slots[use.first.first][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
    }
}

void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}
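
// Worked example of the sizing above: ranges {offset 0, size 16} and {offset 16, size 32}
// give size_needed = max(0 + 16, 16 + 32) = 48, so push_constant_data becomes 48 zeroed bytes.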